repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
apache/jackrabbit | 35,999 | jackrabbit-core/src/main/java/org/apache/jackrabbit/core/cluster/ClusterNode.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.core.cluster;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import javax.jcr.RepositoryException;
import org.apache.jackrabbit.core.cluster.WorkspaceRecord.CreateWorkspaceAction;
import org.apache.jackrabbit.core.config.ClusterConfig;
import org.apache.jackrabbit.core.id.NodeId;
import org.apache.jackrabbit.core.journal.AbstractJournal;
import org.apache.jackrabbit.core.journal.InstanceRevision;
import org.apache.jackrabbit.core.journal.Journal;
import org.apache.jackrabbit.core.journal.JournalException;
import org.apache.jackrabbit.core.journal.Record;
import org.apache.jackrabbit.core.journal.RecordConsumer;
import org.apache.jackrabbit.core.journal.RecordProducer;
import org.apache.jackrabbit.core.nodetype.InvalidNodeTypeDefException;
import org.apache.jackrabbit.core.observation.EventState;
import org.apache.jackrabbit.core.state.ChangeLog;
import org.apache.jackrabbit.core.version.InternalVersionManagerImpl;
import org.apache.jackrabbit.core.xml.ClonedInputSource;
import org.apache.jackrabbit.spi.PrivilegeDefinition;
import org.apache.jackrabbit.spi.QNodeTypeDefinition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import EDU.oswego.cs.dl.util.concurrent.Latch;
import EDU.oswego.cs.dl.util.concurrent.Mutex;
/**
* Default clustered node implementation.
*/
public class ClusterNode implements Runnable,
NamespaceEventChannel, NodeTypeEventChannel, RecordConsumer,
ClusterRecordProcessor, WorkspaceEventChannel, PrivilegeEventChannel {
/**
* System property specifying a node id to use.
*/
public static final String SYSTEM_PROPERTY_NODE_ID = "org.apache.jackrabbit.core.cluster.node_id";
/**
* Producer identifier.
*/
private static final String PRODUCER_ID = "JR";
/**
* Status constant.
*/
private static final int NONE = 0;
/**
* Status constant.
*/
private static final int STARTED = 1;
/**
* Status constant.
*/
private static final int STOPPED = 2;
/**
* Audit logger.
*/
private static Logger auditLogger = LoggerFactory.getLogger("org.apache.jackrabbit.core.audit");
/**
* Default Logger.
*/
private static Logger log = LoggerFactory.getLogger(ClusterNode.class);
/**
* Cluster context.
*/
private ClusterContext clusterContext;
/**
* Cluster node id.
*/
private String clusterNodeId;
/**
* Synchronization delay, in milliseconds.
*/
private long syncDelay;
/**
* Stop delay, in milliseconds.
*/
private long stopDelay;
/**
* Journal used.
*/
private Journal journal;
/**
* Synchronization thread.
*/
private Thread syncThread;
/**
* Mutex used when syncing.
*/
private final Mutex syncLock = new Mutex();
/**
* Update counter, used in displaying the number of updates in audit log.
*/
private final AtomicInteger updateCount = new AtomicInteger();
/**
* Latch used to communicate a stop request to the synchronization thread.
*/
private final Latch stopLatch = new Latch();
/**
* Sync counter, used to avoid repeated sync() calls from piling up.
* Only updated within the critical section guarded by {@link #syncLock}.
*
* @since Apache Jackrabbit 1.6
* @see <a href="https://issues.apache.org/jira/browse/JCR-1753">JCR-1753</a>
*/
private AtomicInteger syncCount = new AtomicInteger();
/**
* Status flag, one of {@link #NONE}, {@link #STARTED} or {@link #STOPPED}.
*/
private int status;
/**
* Map of available lock listeners, indexed by workspace name.
*/
private final Map<String, LockEventListener> wspLockListeners = new HashMap<String, LockEventListener>();
/**
* Map of available update listeners, indexed by workspace name.
*/
private final Map<String, UpdateEventListener> wspUpdateListeners = new HashMap<String, UpdateEventListener>();
/**
* Versioning update listener.
*/
private UpdateEventListener versionUpdateListener;
/**
* Namespace listener.
*/
private NamespaceEventListener namespaceListener;
/**
* Create workspace listener
*/
private WorkspaceListener createWorkspaceListener;
/**
* Node type listener.
*/
private NodeTypeEventListener nodeTypeListener;
/**
* Privilege listener.
*/
private PrivilegeEventListener privilegeListener;
/**
* Instance revision manager.
*/
private InstanceRevision instanceRevision;
/**
* Our record producer.
*/
private RecordProducer producer;
/**
* Record deserializer.
*/
private ClusterRecordDeserializer deserializer = new ClusterRecordDeserializer();
/**
* Flag indicating whether sync is manual.
*/
private boolean disableAutoSync;
/**
 * Initialize this cluster node.
 *
 * @param clusterContext The cluster context.
 * @throws ClusterException if an error occurs
 */
public void init(ClusterContext clusterContext) throws ClusterException {
    this.clusterContext = clusterContext;
    init();
}

/**
 * Initialize this cluster node (overridable).
 * <p>
 * Reads the cluster configuration, opens the journal and its per-instance
 * revision counter, registers this node as a journal record consumer and
 * obtains the record producer used for appending new journal entries.
 *
 * @throws ClusterException if an error occurs
 */
protected void init() throws ClusterException {
    ClusterConfig cc = clusterContext.getClusterConfig();
    clusterNodeId = cc.getId();
    syncDelay = cc.getSyncDelay();
    stopDelay = cc.getStopDelay();
    try {
        journal = cc.getJournal(clusterContext.getNamespaceResolver());
        instanceRevision = journal.getInstanceRevision();
        journal.register(this);
        producer = journal.getProducer(PRODUCER_ID);
    } catch (RepositoryException e) {
        throw new ClusterException(
                "Cluster initialization failed: " + this, e);
    } catch (JournalException e) {
        throw new ClusterException(
                "Journal initialization failed: " + this, e);
    }
}
/**
 * Set the stop delay, i.e. number of milliseconds to wait for the
 * synchronization thread to stop.
 *
 * @param stopDelay stop delay in milliseconds
 */
public void setStopDelay(long stopDelay) {
    this.stopDelay = stopDelay;
}

/**
 * Return the stop delay.
 *
 * @return stop delay in milliseconds
 * @see #setStopDelay(long)
 */
public long getStopDelay() {
    return stopDelay;
}

/**
 * Disable periodic background synchronization. Used for testing purposes, only.
 * Must be called before {@link #start()} to prevent the sync thread from
 * being spawned.
 */
protected void disableAutoSync() {
    disableAutoSync = true;
}
/**
 * Starts this cluster node.
 * <p>
 * Performs an initial synchronization with the journal and, unless
 * auto-sync was disabled via {@link #disableAutoSync()}, spawns a daemon
 * thread running {@link #run()} that periodically re-syncs this node.
 * Subsequent invocations are no-ops (status is no longer {@link #NONE}).
 *
 * @throws ClusterException if an error occurs
 */
public synchronized void start() throws ClusterException {
    if (status == NONE) {
        syncOnStartup();
        if (!disableAutoSync) {
            Thread t = new Thread(this, "ClusterNode-" + clusterNodeId);
            // daemon: must not keep the JVM alive on its own
            t.setDaemon(true);
            t.start();
            syncThread = t;
        }
        status = STARTED;
    }
}
/**
 * Run loop of the background synchronization thread: waits up to
 * {@link #syncDelay} milliseconds on the stop latch, then syncs this node
 * with the journal. The loop terminates once {@link #stop()} releases the
 * stop latch.
 */
public void run() {
    for (;;) {
        try {
            // returns true once stop() has released the latch
            if (stopLatch.attempt(syncDelay)) {
                break;
            }
        } catch (InterruptedException e) {
            String msg = "Interrupted while waiting for stop latch.";
            log.warn(msg);
            // restore the interrupt status so subsequent blocking calls
            // and the thread's owner can still observe the interruption
            Thread.currentThread().interrupt();
        }
        try {
            sync();
        } catch (ClusterException e) {
            String msg = "Periodic sync of journal failed: " + e.getMessage();
            log.error(msg, e);
        } catch (Exception e) {
            String msg = "Unexpected exception while syncing of journal: " + e.getMessage();
            log.error(msg, e);
        } catch (Error e) {
            String msg = "Unexpected error while syncing of journal: " + e.getMessage();
            log.error(msg, e);
            // errors are considered fatal for this thread: rethrow after logging
            throw e;
        }
    }
}
/**
 * Synchronize contents from journal.
 *
 * @param startup indicates if the cluster node is syncing on startup
 *                or does a normal sync.
 * @throws ClusterException if an error occurs
 */
private void internalSync(boolean startup) throws ClusterException {
    int count = syncCount.get();
    try {
        syncLock.acquire();
    } catch (InterruptedException e) {
        // restore the interrupt status and keep the original exception
        // chained so callers can see where the interruption happened
        Thread.currentThread().interrupt();
        throw new ClusterException("Interrupted while waiting for mutex.", e);
    }
    try {
        // JCR-1753: Only synchronize if no other thread already did so
        // while we were waiting to acquire the syncLock.
        if (count == syncCount.get()) {
            syncCount.incrementAndGet();
            journal.sync(startup);
        }
    } catch (JournalException e) {
        // chain the JournalException itself (not just its cause) so no
        // stack frames are lost from the exception chain
        throw new ClusterException(e.getMessage(), e);
    } finally {
        syncLock.release();
    }
}
/**
 * Synchronize contents from journal (normal, non-startup sync).
 *
 * @throws ClusterException if an error occurs
 */
public void sync() throws ClusterException {
    internalSync(false);
}

/**
 * Synchronize contents from journal when a {@link ClusterNode} starts up.
 *
 * @throws ClusterException if an error occurs
 */
public void syncOnStartup() throws ClusterException {
    internalSync(true);
}
/**
 * Stops this cluster node: releases the stop latch to end the sync thread,
 * waits up to {@link #stopDelay} milliseconds for it to finish, then closes
 * the journal and the instance revision. Idempotent.
 */
public synchronized void stop() {
    if (status != STOPPED) {
        status = STOPPED;
        stopLatch.release();
        // Give synchronization thread some time to finish properly before
        // closing down the journal (see JCR-1553)
        if (syncThread != null) {
            try {
                syncThread.join(stopDelay);
            } catch (InterruptedException e) {
                String msg = "Interrupted while joining synchronization thread.";
                log.warn(msg);
                // restore the interrupt status for the caller
                Thread.currentThread().interrupt();
            }
        }
        if (journal != null) {
            journal.close();
        }
        if (instanceRevision != null) {
            instanceRevision.close();
        }
    }
}
/**
 * Create an {@link UpdateEventChannel} for some workspace.
 *
 * @param workspace workspace name; <code>null</code> denotes the version
 *                  storage (see {@link WorkspaceUpdateChannel#setListener})
 * @return update event channel
 */
public UpdateEventChannel createUpdateChannel(String workspace) {
    return new WorkspaceUpdateChannel(workspace);
}

/**
 * Create a {@link LockEventChannel} for some workspace.
 *
 * @param workspace workspace name
 * @return lock event channel
 */
public LockEventChannel createLockChannel(String workspace) {
    return new WorkspaceLockChannel(workspace);
}

/**
 * Return the journal created by this cluster node.
 *
 * @return journal
 */
public Journal getJournal() {
    return journal;
}
//-----------------------------------------------< NamespaceEventListener >
/**
 * {@inheritDoc}
 * <p>
 * Appends a namespace record to the journal and advances the local
 * revision, making the remapping visible to other cluster nodes. Failures
 * are logged, not rethrown; a failed record is cancelled in the finally
 * block so the journal is not left with a pending update.
 */
public void remapped(String oldPrefix, String newPrefix, String uri) {
    if (status != STARTED) {
        log.info("not started: namespace operation ignored.");
        return;
    }
    ClusterRecord record = null;
    boolean succeeded = false;
    try {
        record = new NamespaceRecord(oldPrefix, newPrefix, uri, producer.append());
        record.write();
        record.update();
        setRevision(record.getRevision());
        succeeded = true;
    } catch (JournalException e) {
        String msg = "Unable to create log entry: " + e.getMessage();
        log.error(msg);
    } catch (Throwable e) {
        String msg = "Unexpected error while creating log entry.";
        log.error(msg, e);
    } finally {
        if (!succeeded && record != null) {
            record.cancelUpdate();
        }
    }
}

/**
 * Sets the listener that is informed about namespace remappings received
 * from other cluster nodes.
 */
public void setListener(NamespaceEventListener listener) {
    namespaceListener = listener;
}
//------------------------------------------------< NodeTypeEventListener >
/**
 * {@inheritDoc}
 * <p>
 * Appends a node type registration record to the journal and advances the
 * local revision. Failures are logged, not rethrown; the pending record is
 * cancelled on failure.
 */
public void registered(Collection ntDefs) {
    if (status != STARTED) {
        log.info("not started: nodetype operation ignored.");
        return;
    }
    ClusterRecord record = null;
    boolean succeeded = false;
    try {
        // true -> registration (as opposed to unregistration below)
        record = new NodeTypeRecord(ntDefs, true, producer.append());
        record.write();
        record.update();
        setRevision(record.getRevision());
        succeeded = true;
    } catch (JournalException e) {
        String msg = "Unable to create log entry: " + e.getMessage();
        log.error(msg);
    } catch (Throwable e) {
        String msg = "Unexpected error while creating log entry.";
        log.error(msg, e);
    } finally {
        if (!succeeded && record != null) {
            record.cancelUpdate();
        }
    }
}

/**
 * {@inheritDoc}
 * <p>
 * Appends a node type re-registration record to the journal and advances
 * the local revision. Failures are logged, not rethrown.
 */
public void reregistered(QNodeTypeDefinition ntDef) {
    if (status != STARTED) {
        log.info("not started: nodetype operation ignored.");
        return;
    }
    ClusterRecord record = null;
    boolean succeeded = false;
    try {
        record = new NodeTypeRecord(ntDef, producer.append());
        record.write();
        record.update();
        setRevision(record.getRevision());
        succeeded = true;
    } catch (JournalException e) {
        String msg = "Unable to create log entry: " + e.getMessage();
        log.error(msg);
    } catch (Throwable e) {
        String msg = "Unexpected error while creating log entry.";
        log.error(msg, e);
    } finally {
        if (!succeeded && record != null) {
            record.cancelUpdate();
        }
    }
}

/**
 * {@inheritDoc}
 * <p>
 * Appends a node type unregistration record to the journal and advances
 * the local revision. Failures are logged, not rethrown.
 */
public void unregistered(Collection qnames) {
    if (status != STARTED) {
        log.info("not started: nodetype operation ignored.");
        return;
    }
    ClusterRecord record = null;
    boolean succeeded = false;
    try {
        // false -> unregistration
        record = new NodeTypeRecord(qnames, false, producer.append());
        record.write();
        record.update();
        setRevision(record.getRevision());
        succeeded = true;
    } catch (JournalException e) {
        String msg = "Unable to create log entry: " + e.getMessage();
        log.error(msg);
    } catch (Throwable e) {
        String msg = "Unexpected error while creating log entry.";
        log.error(msg, e);
    } finally {
        if (!succeeded && record != null) {
            record.cancelUpdate();
        }
    }
}

/**
 * {@inheritDoc}
 */
public void setListener(NodeTypeEventListener listener) {
    nodeTypeListener = listener;
}
//----------------------------------------------< PrivilegeEventChannel >---
/**
 * {@inheritDoc}
 * <p>
 * Appends a privilege registration record to the journal and advances the
 * local revision. Failures are logged, not rethrown; the pending record is
 * cancelled on failure.
 *
 * @see PrivilegeEventChannel#registeredPrivileges(java.util.Collection)
 */
public void registeredPrivileges(Collection<PrivilegeDefinition> definitions) {
    if (status != STARTED) {
        // fixed copy-paste error: this is a privilege operation, not a
        // nodetype operation
        log.info("not started: privilege operation ignored.");
        return;
    }
    ClusterRecord record = null;
    boolean succeeded = false;
    try {
        record = new PrivilegeRecord(definitions, producer.append());
        record.write();
        record.update();
        setRevision(record.getRevision());
        succeeded = true;
    } catch (JournalException e) {
        String msg = "Unable to create log entry: " + e.getMessage();
        log.error(msg);
    } catch (Throwable e) {
        String msg = "Unexpected error while creating log entry.";
        log.error(msg, e);
    } finally {
        if (!succeeded && record != null) {
            record.cancelUpdate();
        }
    }
}

/**
 * Sets the listener that is informed about privilege registrations
 * received from other cluster nodes.
 */
public void setListener(PrivilegeEventListener listener) {
    privilegeListener = listener;
}
//--------------------------------------------------------------------------
/**
 * Workspace update channel. Implements the created/prepared/committed/
 * cancelled protocol for distributing workspace updates via the journal.
 * The journal record in flight is carried between protocol steps as an
 * attribute on the {@link Update} itself.
 */
class WorkspaceUpdateChannel implements UpdateEventChannel {
    /**
     * Attribute name used to store record.
     */
    private static final String ATTRIBUTE_RECORD = "record";
    /**
     * Attribute name used to store the size of the update.
     */
    private static final String ATTRIBUTE_UPDATE_SIZE = "updateSize";
    /**
     * Workspace name; <code>null</code> denotes the version storage
     * (see {@link #setListener(UpdateEventListener)}).
     */
    private final String workspace;
    /**
     * Create a new instance of this class.
     *
     * @param workspace workspace name
     */
    public WorkspaceUpdateChannel(String workspace) {
        this.workspace = workspace;
    }
    /**
     * {@inheritDoc}
     * <p>
     * Appends a fresh journal record and attaches it to the update for the
     * subsequent prepare/commit steps.
     */
    public void updateCreated(Update update) throws ClusterException {
        if (status != STARTED) {
            log.info("not started: update create ignored.");
            return;
        }
        try {
            Record record = producer.append();
            update.setAttribute(ATTRIBUTE_RECORD, record);
        } catch (JournalException e) {
            String msg = "Unable to create log entry: " + e.getMessage();
            throw new ClusterException(msg, e);
        } catch (Throwable e) {
            String msg = "Unexpected error while creating log entry: "
                    + e.getMessage();
            throw new ClusterException(msg, e);
        }
    }
    /**
     * {@inheritDoc}
     * <p>
     * Serializes the update's changes and events into the previously
     * created record. On failure the record is cancelled and detached from
     * the update so that commit/cancel become no-ops.
     */
    public void updatePrepared(Update update) throws ClusterException {
        if (status != STARTED) {
            log.info("not started: update prepare ignored.");
            return;
        }
        Record record = (Record) update.getAttribute(ATTRIBUTE_RECORD);
        if (record == null) {
            String msg = "No record created.";
            log.warn(msg);
            return;
        }
        List<EventState> events = update.getEvents();
        ChangeLog changes = update.getChanges();
        boolean succeeded = false;
        try {
            ChangeLogRecord clr = new ChangeLogRecord(changes, events,
                    record, workspace, update.getTimestamp(),
                    update.getUserData());
            clr.write();
            succeeded = true;
        } catch (JournalException e) {
            String msg = "Unable to create log entry: " + e.getMessage();
            throw new ClusterException(msg, e);
        } catch (Throwable e) {
            String msg = "Unexpected error while preparing log entry.";
            throw new ClusterException(msg, e);
        } finally {
            if (!succeeded) {
                record.cancelUpdate();
                update.setAttribute(ATTRIBUTE_RECORD, null);
            }
        }
    }
    /**
     * {@inheritDoc}
     * <p>
     * Commits the prepared record to the journal, advances the local
     * revision and writes an audit log line. Failures at this stage are
     * logged only; the record attribute is always cleared afterwards.
     */
    public void updateCommitted(Update update, String path) {
        Record record = (Record) update.getAttribute(ATTRIBUTE_RECORD);
        if (record == null) {
            if (status == STARTED) {
                log.warn("No record prepared.");
            } else {
                log.info("not started: update commit ignored.");
            }
            return;
        }
        try {
            long recordRevision = record.getRevision();
            setRevision(recordRevision);
            long journalUpdateSize = record.update();
            log.debug("Stored record '{}' to Journal ({})", recordRevision, journalUpdateSize);
            // ATTRIBUTE_UPDATE_SIZE is never set in this class — presumably
            // set elsewhere on the Update; defaults to 0 if absent
            Object updateSizeValue = update.getAttribute(ATTRIBUTE_UPDATE_SIZE);
            long updateSize = updateSizeValue != null ? (Long) updateSizeValue : 0;
            // wrap the audit counter back to 0 before it overflows
            updateCount.compareAndSet(Integer.MAX_VALUE, 0);
            auditLogger.info("[{}] {} {} ({})", new Object[]{updateCount.incrementAndGet(),
                    record.getRevision(), path, updateSize});
        } catch (JournalException e) {
            String msg = "Unable to commit log entry.";
            log.error(msg, e);
        } catch (Throwable e) {
            String msg = "Unexpected error while committing log entry.";
            log.error(msg, e);
        } finally {
            update.setAttribute(ATTRIBUTE_RECORD, null);
        }
    }
    /**
     * {@inheritDoc}
     * <p>
     * Cancels the in-flight journal record, if any, and detaches it from
     * the update.
     */
    public void updateCancelled(Update update) {
        Record record = (Record) update.getAttribute(ATTRIBUTE_RECORD);
        if (record != null) {
            record.cancelUpdate();
            update.setAttribute(ATTRIBUTE_RECORD, null);
        }
    }
    /**
     * {@inheritDoc}
     * <p>
     * A <code>null</code> workspace designates the version storage: the
     * listener is stored as the version update listener and, if possible,
     * also handed to the journal as internal version manager.
     */
    public void setListener(UpdateEventListener listener) {
        if (workspace == null) {
            versionUpdateListener = listener;
            if (journal instanceof AbstractJournal &&
                    versionUpdateListener instanceof InternalVersionManagerImpl) {
                ((AbstractJournal) journal).setInternalVersionManager(
                        (InternalVersionManagerImpl) versionUpdateListener);
            }
        } else {
            wspUpdateListeners.remove(workspace);
            if (listener != null) {
                wspUpdateListeners.put(workspace, listener);
            }
        }
    }
}
/**
 * Workspace lock channel. Creates journal-backed cluster operations for
 * lock and unlock requests; the operations are completed later via
 * {@link ClusterNode#ended(DefaultClusterOperation, boolean)}.
 */
class WorkspaceLockChannel implements LockEventChannel {
    /**
     * Workspace name.
     */
    private final String workspace;
    /**
     * Create a new instance of this class.
     *
     * @param workspace workspace name
     */
    public WorkspaceLockChannel(String workspace) {
        this.workspace = workspace;
    }
    /**
     * {@inheritDoc}
     * <p>
     * Creates a lock record and wraps it in a cluster operation. Returns
     * <code>null</code> if this node is not started or the journal entry
     * could not be created (errors are logged, not rethrown).
     */
    public ClusterOperation create(NodeId nodeId, boolean deep, String owner) {
        if (status != STARTED) {
            log.info("not started: lock operation ignored.");
            return null;
        }
        try {
            ClusterRecord record = new LockRecord(nodeId, deep, owner,
                    producer.append(), workspace);
            return new DefaultClusterOperation(ClusterNode.this, record);
        } catch (JournalException e) {
            String msg = "Unable to create log entry: " + e.getMessage();
            log.error(msg);
            return null;
        } catch (Throwable e) {
            String msg = "Unexpected error while creating log entry.";
            log.error(msg, e);
            return null;
        }
    }
    /**
     * {@inheritDoc}
     * <p>
     * Creates an unlock record and wraps it in a cluster operation.
     * Returns <code>null</code> if this node is not started or the journal
     * entry could not be created.
     */
    public ClusterOperation create(NodeId nodeId) {
        if (status != STARTED) {
            log.info("not started: unlock operation ignored.");
            return null;
        }
        try {
            ClusterRecord record = new LockRecord(nodeId, producer.append(),
                    workspace);
            return new DefaultClusterOperation(ClusterNode.this, record);
        } catch (JournalException e) {
            String msg = "Unable to create log entry: " + e.getMessage();
            log.error(msg);
            return null;
        } catch (Throwable e) {
            String msg = "Unexpected error while creating log entry.";
            log.error(msg, e);
            return null;
        }
    }
    /**
     * {@inheritDoc}
     * <p>
     * Registers (or, when <code>listener</code> is <code>null</code>,
     * deregisters) the lock event listener for this channel's workspace.
     */
    public void setListener(LockEventListener listener) {
        wspLockListeners.remove(workspace);
        if (listener != null) {
            wspLockListeners.put(workspace, listener);
        }
    }
}
//-------------------------------------------------------< RecordConsumer >
/**
 * {@inheritDoc}
 */
public String getId() {
    return PRODUCER_ID;
}

/**
 * {@inheritDoc}
 * <p>
 * Returns the current instance revision; on failure the error is logged
 * and {@link Long#MAX_VALUE} is returned, which effectively makes this
 * consumer appear fully up to date.
 */
public long getRevision() {
    try {
        return instanceRevision.get();
    } catch (JournalException e) {
        log.warn("Unable to return current revision.", e);
        return Long.MAX_VALUE;
    }
}
/**
 * {@inheritDoc}
 * <p>
 * Deserializes the given journal record into a cluster record and lets it
 * dispatch itself back to this {@link ClusterRecordProcessor}.
 * Deserialization failures are logged, not rethrown.
 */
public void consume(Record record) {
    // parameterized logging avoids eager string concatenation when the
    // log level is disabled
    log.info("Processing revision: {}", record.getRevision());
    try {
        deserializer.deserialize(record).process(this);
    } catch (JournalException e) {
        String msg = "Unable to read revision '" + record.getRevision() + "'.";
        log.error(msg, e);
    }
}
/**
 * {@inheritDoc}
 * <p>
 * Persists the given revision as this instance's current revision.
 * Failures are logged, not rethrown.
 */
public void setRevision(long revision) {
    try {
        instanceRevision.set(revision);
    } catch (JournalException e) {
        log.warn("Unable to set current revision to " + revision + ".", e);
    }
}
//--------------------------------------------------- ClusterRecordProcessor
/**
 * {@inheritDoc}
 * <p>
 * Delivers an external update to the matching listener: the workspace
 * update listener for a named workspace (bootstrapping it on demand via
 * the cluster context), or the version update listener when the record's
 * workspace is <code>null</code>. Writes an audit line before delivery.
 */
public void process(ChangeLogRecord record) {
    String workspace = record.getWorkspace();
    UpdateEventListener listener = null;
    if (workspace != null) {
        listener = wspUpdateListeners.get(workspace);
        if (listener == null) {
            // listener not yet registered: ask the context to bring the
            // workspace online, then retry the lookup
            try {
                clusterContext.updateEventsReady(workspace);
            } catch (RepositoryException e) {
                String msg = "Error making update listener for workspace " +
                        workspace + " online: " + e.getMessage();
                log.warn(msg);
            }
            listener = wspUpdateListeners.get(workspace);
            if (listener == null) {
                String msg = "Update listener unavailable for workspace: " + workspace;
                log.error(msg);
                return;
            }
        }
    } else {
        if (versionUpdateListener != null) {
            listener = versionUpdateListener;
        } else {
            String msg = "Version update listener unavailable.";
            log.error(msg);
            return;
        }
    }
    try {
        List<EventState> eventStates = record.getEvents();
        // note: for version updates workspace is null here, so the audit
        // path will contain the literal "null"
        String path = getFirstUserId(eventStates)
                + "@" + workspace
                + ":" + EventState.getCommonPath(eventStates, null);
        // wrap the audit counter back to 0 before it overflows
        updateCount.compareAndSet(Integer.MAX_VALUE, 0);
        auditLogger.info("[{}] {} {}", new Object[]{updateCount.incrementAndGet(),
                record.getRevision(), path});
        listener.externalUpdate(record.getChanges(), eventStates,
                record.getTimestamp(), record.getUserData());
    } catch (RepositoryException e) {
        String msg = "Unable to deliver update events: " + e.getMessage();
        log.error(msg);
        // propagate a wrapped IllegalStateException (e.g. from a closed
        // component) instead of swallowing it
        if (e.getCause() instanceof IllegalStateException) {
            throw (IllegalStateException) e.getCause();
        }
    }
}
/**
 * {@inheritDoc}
 * <p>
 * Delivers an external lock or unlock event to the workspace's lock
 * listener, bootstrapping the listener on demand via the cluster context.
 */
public void process(LockRecord record) {
    String workspace = record.getWorkspace();
    LockEventListener listener = wspLockListeners.get(workspace);
    if (listener == null) {
        // listener not yet registered: ask the context to bring the
        // workspace online, then retry the lookup
        try {
            clusterContext.lockEventsReady(workspace);
        } catch (RepositoryException e) {
            String msg = "Unable to make lock listener for workspace " +
                    workspace + " online: " + e.getMessage();
            log.warn(msg);
        }
        listener = wspLockListeners.get(workspace);
        if (listener == null) {
            String msg = "Lock channel unavailable for workspace: " + workspace;
            log.error(msg);
            return;
        }
    }
    try {
        if (record.isLock()) {
            listener.externalLock(record.getNodeId(), record.isDeep(),
                    record.getOwner());
        } else {
            listener.externalUnlock(record.getNodeId());
        }
    } catch (RepositoryException e) {
        String msg = "Unable to deliver lock event: " + e.getMessage();
        log.error(msg);
        // propagate a wrapped IllegalStateException instead of swallowing it
        if (e.getCause() instanceof IllegalStateException) {
            throw (IllegalStateException) e.getCause();
        }
    }
}
/**
 * {@inheritDoc}
 * <p>
 * Delivers an external namespace remapping to the registered namespace
 * listener. Failures are logged, not rethrown.
 */
public void process(NamespaceRecord record) {
    if (namespaceListener == null) {
        String msg = "Namespace listener unavailable.";
        log.error(msg);
        return;
    }
    try {
        namespaceListener.externalRemap(record.getOldPrefix(),
                record.getNewPrefix(), record.getUri());
    } catch (RepositoryException e) {
        String msg = "Unable to deliver namespace operation: " + e.getMessage();
        log.error(msg);
    }
}
/**
 * {@inheritDoc}
 * <p>
 * Dispatches an external node type operation (register, unregister or
 * re-register) to the registered node type listener. Failures are logged,
 * not rethrown.
 */
public void process(NodeTypeRecord record) {
    if (nodeTypeListener == null) {
        String msg = "NodeType listener unavailable.";
        log.error(msg);
        return;
    }
    Collection coll = record.getCollection();
    try {
        switch (record.getOperation()) {
            case NodeTypeRecord.REGISTER:
                nodeTypeListener.externalRegistered(coll);
                break;
            case NodeTypeRecord.UNREGISTER:
                nodeTypeListener.externalUnregistered(coll);
                break;
            case NodeTypeRecord.REREGISTER:
                // a re-registration record carries exactly one definition
                QNodeTypeDefinition ntd = (QNodeTypeDefinition) coll.iterator().next();
                nodeTypeListener.externalReregistered(ntd);
                break;
        }
    } catch (InvalidNodeTypeDefException e) {
        String msg = "Unable to deliver node type operation: " + e.getMessage();
        log.error(msg);
    } catch (RepositoryException e) {
        String msg = "Unable to deliver node type operation: " + e.getMessage();
        log.error(msg);
    }
}
/**
 * Delivers an external privilege registration to the registered privilege
 * listener. Failures are logged, not rethrown.
 *
 * @param record privilege record received from the journal
 */
public void process(PrivilegeRecord record) {
    if (privilegeListener == null) {
        String msg = "Privilege listener unavailable.";
        log.error(msg);
        return;
    }
    try {
        privilegeListener.externalRegisteredPrivileges(record.getDefinitions());
    } catch (RepositoryException e) {
        String msg = "Unable to deliver privilege registration operation: " + e.getMessage();
        log.error(msg);
    }
}
/**
 * Delivers an external workspace creation to the registered workspace
 * listener. Only create-workspace actions are handled; failures are
 * logged, not rethrown.
 *
 * @param record workspace record received from the journal
 */
public void process(WorkspaceRecord record) {
    if (createWorkspaceListener == null) {
        String msg = "Create Workspace listener unavailable.";
        log.error(msg);
        return;
    }
    try {
        if (record.getActionType() == WorkspaceRecord.CREATE_WORKSPACE_ACTION_TYPE) {
            CreateWorkspaceAction action = record.getCreateWorkspaceAction();
            createWorkspaceListener.externalWorkspaceCreated(record.getWorkspace(), action.getInputSource());
        }
    } catch (RepositoryException e) {
        String msg = "Unable to create workspace: "
                + e.getMessage();
        log.error(msg);
    }
}
// -----------------------------------------------< CreateWorkspaceChannel >
/**
 * Sets the listener that is informed about workspaces created on other
 * cluster nodes.
 */
public void setListener(WorkspaceListener listener) {
    createWorkspaceListener = listener;
}
/**
 * Appends a workspace creation record to the journal and advances the
 * local revision, making the new workspace visible to other cluster nodes.
 * Failures are logged, not rethrown; the pending record is cancelled on
 * failure.
 *
 * @param workspaceName name of the created workspace
 * @param inputSource   workspace configuration template
 */
public void workspaceCreated(String workspaceName,
        ClonedInputSource inputSource) {
    if (status != STARTED) {
        // fixed copy-paste error: this is a workspace operation, not a
        // namespace operation
        log.info("not started: workspace operation ignored.");
        return;
    }
    ClusterRecord record = null;
    boolean succeeded = false;
    try {
        record = new WorkspaceRecord(workspaceName, inputSource, producer.append());
        record.write();
        record.update();
        setRevision(record.getRevision());
        succeeded = true;
    } catch (JournalException e) {
        String msg = "Unable to create log entry: " + e.getMessage();
        log.error(msg);
    } catch (Throwable e) {
        String msg = "Unexpected error while creating log entry.";
        log.error(msg, e);
    } finally {
        if (!succeeded && record != null) {
            record.cancelUpdate();
        }
    }
}
/**
 * Invoked when a cluster operation has ended. If <code>successful</code>,
 * attempts to fill the journal record and update it, otherwise cancels
 * the update.
 *
 * @param operation cluster operation
 * @param successful <code>true</code> if the operation was successful and
 *                   the journal record should be updated;
 *                   <code>false</code> to revoke changes
 */
public void ended(DefaultClusterOperation operation, boolean successful) {
    ClusterRecord record = operation.getRecord();
    boolean succeeded = false;
    try {
        if (successful) {
            record.write();
            record.update();
            setRevision(record.getRevision());
            succeeded = true;
        }
    } catch (JournalException e) {
        String msg = "Unable to create log entry: " + e.getMessage();
        log.error(msg);
    } catch (Throwable e) {
        String msg = "Unexpected error while creating log entry.";
        log.error(msg, e);
    } finally {
        // cancels the record both on failure and when the operation itself
        // was unsuccessful (succeeded stays false in that case)
        if (!succeeded) {
            record.cancelUpdate();
        }
    }
}
/**
 * Returns the user id of the first event state in the given list, or an
 * empty string when the list is <code>null</code> or empty.
 *
 * @param eventStates event states, may be <code>null</code> or empty
 * @return user id of the first event, or the empty string
 */
private String getFirstUserId(List<EventState> eventStates) {
    boolean noEvents = (eventStates == null) || eventStates.isEmpty();
    return noEvents ? "" : eventStates.get(0).getUserId();
}
}
|
oracle/graal | 36,546 | compiler/src/jdk.graal.compiler/src/jdk/graal/compiler/phases/common/InsertProxyPhase.java | /*
* Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.graal.compiler.phases.common;
import java.util.ArrayDeque;
import java.util.BitSet;
import java.util.List;
import java.util.Optional;
import org.graalvm.collections.EconomicMap;
import org.graalvm.collections.Equivalence;
import jdk.graal.compiler.core.common.cfg.BlockMap;
import jdk.graal.compiler.core.common.cfg.CFGLoop;
import jdk.graal.compiler.core.common.util.CompilationAlarm;
import jdk.graal.compiler.debug.Assertions;
import jdk.graal.compiler.debug.DebugContext;
import jdk.graal.compiler.debug.GraalError;
import jdk.graal.compiler.graph.Node;
import jdk.graal.compiler.graph.NodeMap;
import jdk.graal.compiler.graph.Position;
import jdk.graal.compiler.nodeinfo.InputType;
import jdk.graal.compiler.nodes.AbstractMergeNode;
import jdk.graal.compiler.nodes.GraphState;
import jdk.graal.compiler.nodes.GuardPhiNode;
import jdk.graal.compiler.nodes.LoopExitNode;
import jdk.graal.compiler.nodes.MergeNode;
import jdk.graal.compiler.nodes.NodeView;
import jdk.graal.compiler.nodes.PhiNode;
import jdk.graal.compiler.nodes.ProxyNode;
import jdk.graal.compiler.nodes.StructuredGraph;
import jdk.graal.compiler.nodes.ValueNode;
import jdk.graal.compiler.nodes.ValuePhiNode;
import jdk.graal.compiler.nodes.calc.FloatingNode;
import jdk.graal.compiler.nodes.cfg.ControlFlowGraph;
import jdk.graal.compiler.nodes.cfg.HIRBlock;
import jdk.graal.compiler.nodes.extended.GuardingNode;
import jdk.graal.compiler.nodes.memory.MemoryAccess;
import jdk.graal.compiler.nodes.memory.MemoryKill;
import jdk.graal.compiler.nodes.memory.MemoryPhiNode;
import jdk.graal.compiler.nodes.memory.SingleMemoryKill;
import jdk.graal.compiler.nodes.util.GraphUtil;
import jdk.graal.compiler.phases.Phase;
import jdk.graal.compiler.phases.schedule.SchedulePhase;
import jdk.graal.compiler.phases.util.GraphOrder;
/**
* Phase that inserts proxies after partial evaluation. Performing the proxy insertion in a separate
* phase avoids proxying all values during partial evaluation and performing an expensive cleanup
* afterward.
* <p>
* Proxies are required for values that are defined inside a loop and are used outside this loop.
* <p>
 * The basic idea of the algorithm is as follows:
 * <ul>
 * <li>Perform a schedule of all nodes.</li>
 * <li>Iterate all nodes (input nodes) that are inside loops and check whether any of their usages
 * is outside the loop of their input node.</li>
 * <li>If a usage is outside the loop, find its corresponding loop exit (the loop exit block at
 * which the value exits the loop) and insert a proxy for the node.</li>
 * <li>If a loop is nested inside another loop, the added proxy of the inner loop gets added to the
 * loop exit block inside the schedule, so it is processed again for the outer loop.</li>
 * </ul>
*/
public class InsertProxyPhase extends Phase {
@Override
public Optional<NotApplicable> notApplicableTo(GraphState graphState) {
    // this phase imposes no preconditions on the graph state
    return ALWAYS_APPLICABLE;
}

/** Cache key for a created proxy: loop exit block, input type and proxied value. */
private record ProxyKey(HIRBlock block, InputType type, ValueNode value) {
}

/** Cache key for a created phi: block, input type and value. */
private record PhiKey(HIRBlock block, InputType type, ValueNode value) {
}
/**
* Loop information required for inserting value proxies.
*
* @param innerLoop Whether the loop is nested inside another loop.
* @param blockMap The block ids of blocks that are contained inside this loop.
* @param exits The exit blocks of this loop.
* @param replacements The replacement nodes for each exit block of the current loop. If a value
* was already proxied for one of its usages, we can reuse the replacement value.
* @param blockToNode The block to node map of the schedule.
*/
private record LoopScope(boolean innerLoop, BitSet blockMap, List<HIRBlock> exits,
EconomicMap<ProxyKey, ValueNode> replacements, BlockMap<List<Node>> blockToNode) {
/**
* Searches for the loop exit that dominates the given block. If so, by definition there can
* only be a single exit that dominates this block. Else we would have multiple loop exits
* that dominate each other which would be a broken graph.
*/
public HIRBlock exitForBlock(HIRBlock block) {
for (final HIRBlock exit : exits) {
if (exit.dominates(block)) {
return exit;
}
}
return null;
}
/**
* @return Whether the given block is outside this loop.
*/
private boolean blockOutsideLoop(HIRBlock block) {
return !blockMap.get(block.getId());
}
/**
* Creates a proxy for the given value. If a proxy already exists for the value at the given
* loop exit block, the proxy is reused.
*
* @param loopExitBlock The loop exit at which the value must be proxied (the point at which
* the value leaves the loop).
* @param value The value that must be proxied.
* @return A proxy for the given value that is associated with the loop exit.
*/
private ValueNode getOrCreateProxy(HIRBlock loopExitBlock, ValueNode value, InputType type, LoopScope loop, NodeMap<HIRBlock> nodeToBlock) {
ValueNode replacement = replacements.get(new ProxyKey(loopExitBlock, type, value));
if (replacement == null) {
assert loopExitBlock.getBeginNode() instanceof LoopExitNode : "Loop exit block begin must be a loop exit node";
final LoopExitNode loopExit = (LoopExitNode) loopExitBlock.getBeginNode();
if (type == InputType.Value) {
replacement = ProxyNode.forValue(value, loopExit);
} else if (type == InputType.Guard) {
assert value instanceof GuardingNode : "Input type guard requires a guarding node for proxying";
replacement = ProxyNode.forGuard((GuardingNode) value, loopExit);
} else if (type == InputType.Memory) {
GraalError.guarantee(MemoryKill.isSingleMemoryKill(value), "Can only proxy single kills and not multi kills %s", value);
replacement = ProxyNode.forMemory(MemoryKill.asSingleMemoryKill(value), loopExit, MemoryKill.asSingleMemoryKill(value).getKilledLocationIdentity());
} else if (type == InputType.Association || type == InputType.Extension || type == InputType.Condition) {
replacement = cloneNodesUntilNodesCanBeProxied(loopExitBlock, value, type, loop, nodeToBlock);
} else {
throw GraalError.shouldNotReachHere("Unsupported input type for proxy value " + type + " at node " + value);
}
replacements.put(new ProxyKey(loopExitBlock, type, value), replacement);
if (innerLoop) {
/*
* If the current loop is nested in another loop, we add the replacement to the
* exit block, so it is processed again for the outer loop.
*/
blockToNode.get(loopExitBlock).add(replacement);
}
}
return replacement;
}
/**
* Recursively duplicates a floating node until all of its inputs can be safely proxied at a
* given loop exit.
* <p>
* This method is used for handling inputs of types such as Association, Extension, or
* Condition, where normal proxy creation is not supported directly. The method creates a
* duplicate of the specified floating node, replacing its inputs that originate from inside
* the loop with appropriate proxies. For each input within the loop, the process recurses
* to ensure all required proxies are created, and the result is inserted into the schedule
* and block-to-node mapping for future reference.
* <p>
* If the value is not a floating node, an error is thrown. On success, the newly cloned
* node is fully set up for safe use outside the original loop.
*
* @return a duplicated node whose inputs have been appropriately handled so they may be
* proxied at the loop exit
*/
private ValueNode cloneNodesUntilNodesCanBeProxied(HIRBlock loopExitBlock, Node value, InputType type, LoopScope loop, NodeMap<HIRBlock> nodeToBlock) {
if (!(value instanceof FloatingNode)) {
throw GraalError.shouldNotReachHere("Cannot duplicate non floating nodes when creating proxies " + value + " for edge type=" + type);
}
ensureNodeCanBeDuplicated(value);
ValueNode duplicate = (ValueNode) value.copyWithInputs(true);
for (Position pos : duplicate.inputPositions()) {
Node currentInput = pos.get(duplicate);
if (pos.isInputOptional() && currentInput == null) {
continue;
}
HIRBlock inputBlock = getEffectiveBlock(currentInput, nodeToBlock);
if (!loop.blockOutsideLoop(inputBlock)) {
// only create a proxy if we are inside the loop
ValueNode newInput = getOrCreateProxy(loopExitBlock, (ValueNode) currentInput, pos.getInputType(), loop, nodeToBlock);
pos.set(duplicate, newInput);
}
}
// update the schedule with the new node, we might query it later again
nodeToBlock.setAndGrow(duplicate, loopExitBlock);
blockToNode.get(loopExitBlock).add(duplicate);
return duplicate;
}
/**
* Creates a phi for the given value at the nearest dominating merge with a predecessor
* block dominated by a loop exit.
*
* Consider a loop with a random node {@code value} that is defined inside the loop. The
* value is used after the loop - the usage block itself has no single loop exit dominating
* it but a merge (b2) that has 2 predecessors where all of them are dominated by a
* different loop exit. Since bX dominates b2 there is no phi necessary without proxies.
* After adding proxies to b0 (and a block dominating b1) the values no longer dominate b2,
* and thus we need a phi.
*
* <pre>
*
* |------------|
* bX | Some |
* | Block | <-define(value)
* | in |
* | Loop |
* |------------|
*
*
* |-----------| |-----------|
* b1 | Begin | b0 | Loop Exit | <-- requires a proxy
* |-----------| |-----------|
* \ /
* \ /
* |-----------|
* b2 | Merge | <-- requires a phi
* |-----------|
* |
* |-----------|
* b3 | If |
* |-----------|
* / \
* / \
* |-----------| |-----------|
* b4 | Begin | b5 | Begin |
* |-----------| |-----------|
* \ /
* \ /
* |-----------|
* b6 | Merge |
* |-----------|
* |
* |-----------|
* b7 | Block | <-- usage(value)
* |-----------|
* </pre>
* <p>
* If we assume that the value is in b7 in the example above, we would move up the
* dominators and first find b6. Since b6 does not have a predecessor block that is
* dominated by a loop exit, we do not need to insert a phi, since there is another common
* dominator, b3, in between the current block and the loop. Since b3 is not a merge, we
* continue at its dominator b2. Since b2 has a predecessor block dominated by a loop exit
* (b0), we insert a proxy at the loop exit and create a phi at the merge (b2).
*/
private ValueNode createPhiAtLoopExitMerge(Node value, InputType type, HIRBlock usageBlock, StructuredGraph graph, LoopScope loop, NodeMap<HIRBlock> nodeToBlock,
EconomicMap<PhiKey, ValueNode> phiCache) {
graph.getDebug().dump(DebugContext.VERY_DETAILED_LEVEL, graph, "Before creating phi at lex merge value=%s type=%s usageBlock.Begin=%s.%s", value, type, usageBlock,
usageBlock.getBeginNode());
HIRBlock block = usageBlock;
while (block != null && block.getDominatorDepth() > 0) {
// As long as we are not at a merge, move up the dominators
while (block.getPredecessorCount() == 1) {
block = block.getDominator();
}
// after skipping predecessor blocks check if we need a phi
if (mergeNeedsExitPhi(block)) {
break;
}
CompilationAlarm.checkProgress(graph);
// Move up the dominators to find the next candidate block
block = block.getDominator();
}
GraalError.guarantee(block != null && block != block.getCfg().getStartBlock(),
"Must find a block(!=null) and must not walk up to the start, need to find an exit block for %s.%s, found =%s", usageBlock, usageBlock.getBeginNode(), block);
PhiKey phiKey = new PhiKey(block, type, (ValueNode) value);
if (phiCache.containsKey(phiKey)) {
return phiCache.get(phiKey);
}
final AbstractMergeNode merge = (AbstractMergeNode) block.getBeginNode();
final int predecessorCount = block.getPredecessorCount();
if (type == InputType.Association || type == InputType.Extension || type == InputType.Condition) {
/*
* We have to duplicate this node and create phis for the inputs.
*/
ValueNode duplicate = duplicateNonProxyAbleFloatingNodes(value, usageBlock, graph, loop, nodeToBlock, phiCache, predecessorCount, block, merge);
// update the schedule with the new node, we might query it later again
// put the new node in the map as well
nodeToBlock.setAndGrow(duplicate, block);
blockToNode.get(block).add(duplicate);
// cache the entry
phiCache.put(phiKey, duplicate);
return duplicate;
} else if (type == InputType.Guard || type == InputType.Value || type == InputType.Memory) {
ValueNode[] phiValues = proxyInputsIfNecessary(graph, loop, nodeToBlock, phiCache, predecessorCount, block, value, type);
if (type == InputType.Value) {
ValueNode replacement = graph.addOrUniqueWithInputs(new ValuePhiNode(phiValues[0].stamp(NodeView.DEFAULT), merge, phiValues));
phiCache.put(phiKey, replacement);
return replacement;
} else if (type == InputType.Guard) {
ValueNode replacement = graph.addOrUniqueWithInputs(new GuardPhiNode(merge, phiValues));
phiCache.put(phiKey, replacement);
return replacement;
} else if (type == InputType.Memory) {
ValueNode replacement = graph.addOrUniqueWithInputs(new MemoryPhiNode(merge, MemoryKill.asSingleMemoryKill(phiValues[0]).getKilledLocationIdentity(), phiValues));
phiCache.put(phiKey, replacement);
return replacement;
} else {
throw GraalError.shouldNotReachHere(
"Unsupported input type for proxy value " + value + " with edge type=" + type + " usageBlock=" + usageBlock + ".begin=" + usageBlock.getBeginNode());
}
}
throw GraalError.shouldNotReachHere(
"Unsupported input type for proxy value " + value + " with edge type=" + type + " usageBlock=" + usageBlock + ".begin=" + usageBlock.getBeginNode());
}
/**
* Determines if the current merge need phis for proxies of the given loop exits. That is,
* this merge is not dominated by a single exit and no predecessor either.
*/
private boolean mergeNeedsExitPhi(HIRBlock block) {
/*
* We are at a merge block, if our dominator is dominated by an exit or one our
* predecessors we can stop iterating.
*/
if (block.getBeginNode() instanceof MergeNode) {
/*
* Check, if at least one of the merge predecessors is dominated by a loop exit. If
* this is the case, create a new phi for the value. Otherwise, we are at a merge
* that does not require a phi for the value.
*/
final int predecessorCount = block.getPredecessorCount();
for (int i = 0; i < predecessorCount; i++) {
final HIRBlock predecessor = block.getPredecessorAt(i);
final HIRBlock predecessorLoopExit = exitForBlock(predecessor);
if (predecessorLoopExit != null) {
// predecssor exits, require a phi here
return true;
}
}
final HIRBlock predecessorLoopExit = exitForBlock(block.getDominator());
if (predecessorLoopExit == null) {
/*
* Neither one of our predecessors (forward ends for the current merge) nor our
* dominator is dominated by a loop exit. We have to create this phi. This can
* happen for very convoluted merge patterns of non-canonical graphs.
*/
return true;
}
}
return false;
}
/**
* Creates a duplicate of a non-proxyable floating node, replacing its inputs with proxies
* or phis as necessary.
* <p>
* This method recursively traverses the inputs of a given floating node and, for inputs
* that are still within the current loop, replaces them with value phis, guard phis, or
* memory phis at the merge block corresponding to the given usage context. This ensures
* that all (nested) inputs are correctly proxied as needed for values that flow out of
* their defining loop.
* <p>
* The duplicate node will have the same structural characteristics as the original, except
* where any inputs are replaced with suitable proxies or phis, guaranteeing proper
* loop-exit semantics.
*/
private ValueNode duplicateNonProxyAbleFloatingNodes(Node value, HIRBlock usageBlock, StructuredGraph graph, LoopScope loop, NodeMap<HIRBlock> nodeToBlock,
EconomicMap<PhiKey, ValueNode> phiCache, int predecessorCount, HIRBlock block, AbstractMergeNode merge) {
ensureNodeCanBeDuplicated(value);
ValueNode duplicate = (ValueNode) value.copyWithInputs(true);
for (Position pos : duplicate.inputPositions()) {
Node currentInput = pos.get(duplicate);
if (pos.isInputOptional() && currentInput == null) {
continue;
}
InputType currentInputType = pos.getInputType();
final HIRBlock currentInputBlock = getEffectiveBlock(currentInput, nodeToBlock);
if (!loop.blockOutsideLoop(currentInputBlock)) {
ValueNode[] phiValues = proxyInputsIfNecessary(graph, loop, nodeToBlock, phiCache, predecessorCount, block, currentInput, currentInputType);
ValueNode replacement = null;
if (currentInputType == InputType.Value) {
replacement = graph.addOrUniqueWithInputs(new ValuePhiNode(phiValues[0].stamp(NodeView.DEFAULT), merge, phiValues));
} else if (currentInputType == InputType.Guard) {
replacement = graph.addOrUniqueWithInputs(new GuardPhiNode(merge, phiValues));
} else if (currentInputType == InputType.Memory) {
// cannot phi multi kills
replacement = graph.addOrUniqueWithInputs(new MemoryPhiNode(merge, ((SingleMemoryKill) phiValues[0]).getKilledLocationIdentity(), phiValues));
} else {
throw GraalError.shouldNotReachHere(
"Unsupported input type for proxy value " + value + " with edge type=" + currentInputType + " usageBlock=" + usageBlock + ".begin=" +
usageBlock.getBeginNode());
}
// only create a proxy if we are inside the loop
pos.set(duplicate, replacement);
}
}
return duplicate;
}
/**
* Generates an array of value nodes (proxies or phis as needed) for a given node input at a
* merge point.
* <p>
* For each predecessor of the given merge block, this method determines whether the input
* node needs to be proxied because it leaves a loop, or whether it should recursively
* create phi nodes if the predecessor is itself dominated by multiple loop exits. This
* logic ensures that all values flowing between loops and merge points are correctly
* represented with the appropriate node (proxy or phi) to maintain correct data and control
* dependencies across loop boundaries.
*/
private ValueNode[] proxyInputsIfNecessary(StructuredGraph graph, LoopScope loop, NodeMap<HIRBlock> nodeToBlock, EconomicMap<PhiKey, ValueNode> phiCache, int predecessorCount, HIRBlock block,
Node currentInput, InputType currentInputType) {
final ValueNode[] phiValues = new ValueNode[predecessorCount];
for (int i = 0; i < predecessorCount; i++) {
final HIRBlock predecessor = block.getPredecessorAt(i);
final HIRBlock predecessorLoopExit = exitForBlock(predecessor);
final ValueNode phiValue;
if (predecessorLoopExit == null) {
/*
* The predecessor is again dominated by a merge of multiple loop exits, so we
* recursively create another phi as input to this phi.
*/
phiValue = createPhiAtLoopExitMerge(currentInput, currentInputType, predecessor, graph, loop, nodeToBlock, phiCache);
} else {
phiValue = getOrCreateProxy(predecessorLoopExit, (ValueNode) currentInput, currentInputType, loop, nodeToBlock);
}
phiValues[i] = phiValue;
}
return phiValues;
}
/**
* Replaces the inputs of the phi that correspond to the given value by a proxy.
*/
private void processPhiInputs(ValueNode value, InputType type, PhiNode phi, HIRBlock usageBlock, StructuredGraph graph, LoopScope loop, NodeMap<HIRBlock> nodeToBlock,
EconomicMap<PhiKey, ValueNode> phiCache) {
for (int i = 0; i < phi.valueCount(); i++) {
if (phi.valueAt(i) == value) {
final HIRBlock predecessor = usageBlock.getPredecessorAt(i);
final HIRBlock predecessorLoopExit = exitForBlock(predecessor);
final ValueNode phiValue;
if (predecessorLoopExit == null) {
/*
* The predecessor is dominated by a merge of multiple loop exits, so we
* create a phi for the given value.
*/
phiValue = createPhiAtLoopExitMerge(value, type, predecessor, graph, loop, nodeToBlock, phiCache);
} else {
phiValue = getOrCreateProxy(predecessorLoopExit, value, type, loop, nodeToBlock);
}
phi.setValueAt(i, phiValue);
}
}
}
/**
* Adds a proxy for the inputs of the phi.
*/
public void processPhiNodeUsage(ValueNode value, PhiNode phi, HIRBlock usageBlock, StructuredGraph graph, LoopScope loop, NodeMap<HIRBlock> nodeToBlock,
EconomicMap<PhiKey, ValueNode> phiCache) {
if (phi instanceof ValuePhiNode) {
processPhiInputs(value, InputType.Value, phi, usageBlock, graph, loop, nodeToBlock, phiCache);
} else if (phi instanceof GuardPhiNode) {
processPhiInputs(value, InputType.Guard, phi, usageBlock, graph, loop, nodeToBlock, phiCache);
} else if (phi instanceof MemoryPhiNode) {
processPhiInputs(value, InputType.Memory, phi, usageBlock, graph, loop, nodeToBlock, phiCache);
} else {
throw GraalError.shouldNotReachHere("Unsupported input type for proxy value " + phi);
}
}
public void processValueNodeUsage(ValueNode value, Node usage, HIRBlock usageBlock, StructuredGraph graph, LoopScope loop, NodeMap<HIRBlock> nodeToBlock,
EconomicMap<PhiKey, ValueNode> phiCache) {
final HIRBlock exit = exitForBlock(usageBlock);
for (Position pos : usage.inputPositions()) {
if (pos.get(usage) == value) {
final ValueNode proxy;
if (exit == null) {
/*
* If there's no single loop exit dominating the usage block, it may be due
* to a control-flow merge between the usage block and loop exits. In this
* case, we search for the next merge dominating the usage block that
* requires a phi for the value. See {@link #createPhiAtLoopExitMerge} for
* details.
*/
proxy = createPhiAtLoopExitMerge(value, pos.getInputType(), usageBlock, graph, loop, nodeToBlock, phiCache);
} else {
proxy = getOrCreateProxy(exit, value, pos.getInputType(), loop, nodeToBlock);
}
pos.set(usage, proxy);
}
}
}
}
/**
* Searches for a matching loop scope for the given loop. If the loop was not seen before, a new
* loop scope is created.
*/
private static LoopScope getOrCreateLoopScope(CFGLoop<HIRBlock> loop, EconomicMap<CFGLoop<HIRBlock>, LoopScope> loopScopes, EconomicMap<ProxyKey, ValueNode> replacements,
BlockMap<List<Node>> blockToNode) {
LoopScope scope = loopScopes.get(loop);
if (scope == null) {
final BitSet blockMap = new BitSet();
for (HIRBlock block : loop.getBlocks()) {
blockMap.set(block.getId());
}
final List<HIRBlock> loopExits = loop.getLoopExits();
scope = new LoopScope(loop.getParent() != null, blockMap, loopExits, replacements, blockToNode);
loopScopes.put(loop, scope);
}
return scope;
}
@Override
protected void run(StructuredGraph graph) {
if (!graph.hasLoops()) {
return;
}
removeProxies(graph);
// if there is dead code clean it up already
final boolean immutableGraph = false;
// do not verify proxies because we are building them
final boolean verifyProxies = false;
SchedulePhase.runWithoutContextOptimizations(graph, SchedulePhase.SchedulingStrategy.LATEST_OUT_OF_LOOPS, ControlFlowGraph.computeForSchedule(graph), immutableGraph, verifyProxies);
final HIRBlock[] reversePostOrder = graph.getLastCFG().reversePostOrder();
final StructuredGraph.ScheduleResult schedule = graph.getLastSchedule();
final BlockMap<List<Node>> blockToNode = schedule.getBlockToNodesMap();
final NodeMap<HIRBlock> nodeToBlock = schedule.getNodeToBlockMap();
final EconomicMap<CFGLoop<HIRBlock>, LoopScope> loopScopes = EconomicMap.create(Equivalence.IDENTITY);
final EconomicMap<ProxyKey, ValueNode> replacements = EconomicMap.create();
final EconomicMap<PhiKey, ValueNode> phiCache = EconomicMap.create();
/*
* We maintain a working set, so we don't have to create a new array for each set of node
* usages.
*/
final ArrayDeque<Node> workingSet = new ArrayDeque<>();
for (HIRBlock block : reversePostOrder) {
final CFGLoop<HIRBlock> loop = block.getLoop();
if (loop == null) {
// We are outside a loop, no nodes to process
continue;
}
final LoopScope scope = getOrCreateLoopScope(loop, loopScopes, replacements, blockToNode);
for (Node node : blockToNode.get(block)) {
if (node instanceof AbstractMergeNode merge) {
for (PhiNode phi : merge.phis()) {
processNode(phi, graph, workingSet, nodeToBlock, scope, replacements, phiCache);
}
} else {
processNode(node, graph, workingSet, nodeToBlock, scope, replacements, phiCache);
}
}
}
assert GraphOrder.assertSchedulableGraph(graph);
}
/**
* <p>
* <b>Note on Pre-existing Proxies and Cleanup:</b> Due to Truffle-level inlining, proxies may
* already exist in parts of the graph. Since proxies (and phis) are handled specially by the
* scheduler, they can create subtle issues with floating nodes or chains, making it unclear
* whether a node is inside or outside a loop. This can lead to non-decidable schedules. To
* prevent such issues, all proxies must be deleted from the graph before running this phase.
* </p>
*
* See InsertProxyReProxyTest.java for a background.
*/
private static void removeProxies(StructuredGraph graph) {
for (LoopExitNode exit : graph.getNodes(LoopExitNode.TYPE)) {
exit.removeProxies();
}
}
private static void processNode(Node node, StructuredGraph graph, ArrayDeque<Node> workingSet, NodeMap<HIRBlock> nodeToBlock, LoopScope loop, EconomicMap<ProxyKey, ValueNode> replacements,
EconomicMap<PhiKey, ValueNode> phiCache) {
if (node.hasUsages() && node instanceof ValueNode value) {
node.usages().snapshotTo(workingSet);
while (!workingSet.isEmpty()) {
final Node usage = workingSet.pop();
/*
* PhiNodes and ProxyNodes are not scheduled into a block. Therefore, we use their
* associated merges and proxy points as reference points.
*/
if (usage instanceof PhiNode phi) {
final HIRBlock phiUsageBlock = nodeToBlock.get(phi.merge());
if (loop.blockOutsideLoop(phiUsageBlock)) {
loop.processPhiNodeUsage(value, phi, phiUsageBlock, graph, loop, nodeToBlock, phiCache);
}
} else if (usage instanceof ProxyNode proxy) {
final HIRBlock proxyUsageBlock = nodeToBlock.get(proxy.proxyPoint());
/*
* If the proxy is already at an exit of the current loop (the dominating loop
* exit is the usage block), we don't have to process it.
*/
if (loop.exitForBlock(proxyUsageBlock) != proxyUsageBlock) {
if (loop.blockOutsideLoop(proxyUsageBlock)) {
loop.processValueNodeUsage(value, proxy, proxyUsageBlock, graph, loop, nodeToBlock, phiCache);
}
}
} else {
final HIRBlock usageBlock = nodeToBlock.get(usage);
if (loop.blockOutsideLoop(usageBlock)) {
loop.processValueNodeUsage(value, usage, usageBlock, graph, loop, nodeToBlock, phiCache);
}
}
}
/*
* The replacements are cached for each node. We have to clear the map when we are
* finished with processing the node.
*/
replacements.clear();
}
}
/**
* Retrieves the effective block for a given node, handling {@link PhiNode} and
* {@link ProxyNode} by resolving to their associated {@link MergeNode} or {@link LoopExitNode}
* block, respectively.
*/
private static HIRBlock getEffectiveBlock(Node current, NodeMap<HIRBlock> blockToNode) {
// if we created the phi or proxy
HIRBlock block = blockToNode.isNew(current) ? null : blockToNode.get(current);
if (block == null) {
assert current instanceof PhiNode || current instanceof ProxyNode : Assertions.errorMessage("Only phis are not scheduled (in a late schedule)", current);
if (current instanceof PhiNode phi) {
block = blockToNode.get(phi.merge());
} else if (current instanceof ProxyNode proxy) {
block = blockToNode.get(proxy.proxyPoint());
} else {
throw GraalError.shouldNotReachHere("Node without a schedule " + current);
}
}
return block;
}
/**
* Verifies that a given node can be safely duplicated.
* <p>
* A node is considered duplicable if it is a floating node and its semantics is safe in and
* outside the loop. This is generally true for all floating nodes (since they are side effect
* free) except for memory accesses. This is crucial because duplicating a memory access outside
* its original loop could interfere with the memory graph, potentially causing inconsistencies
* or incorrect behavior if the loop overlaps with the locations of the access.
*/
private static void ensureNodeCanBeDuplicated(Node n) {
GraalError.guarantee(GraphUtil.isFloatingNode(n), "Node must be a floating node to be duplicated");
GraalError.guarantee(!(n instanceof MemoryAccess), "Cannot duplicate memory access outside of loop, memory graph could be in the way");
}
}
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.apache.poi.xslf.usermodel;
import java.awt.Color;
import java.awt.geom.Rectangle2D;
import org.apache.logging.log4j.Logger;
import org.apache.poi.logging.PoiLogManager;
import org.apache.poi.ooxml.util.POIXMLUnits;
import org.apache.poi.openxml4j.opc.PackagePart;
import org.apache.poi.sl.draw.DrawPaint;
import org.apache.poi.sl.draw.geom.CustomGeometry;
import org.apache.poi.sl.draw.geom.Guide;
import org.apache.poi.sl.draw.geom.PresetGeometries;
import org.apache.poi.sl.usermodel.FillStyle;
import org.apache.poi.sl.usermodel.LineDecoration;
import org.apache.poi.sl.usermodel.LineDecoration.DecorationShape;
import org.apache.poi.sl.usermodel.LineDecoration.DecorationSize;
import org.apache.poi.sl.usermodel.PaintStyle;
import org.apache.poi.sl.usermodel.PaintStyle.SolidPaint;
import org.apache.poi.sl.usermodel.ShapeType;
import org.apache.poi.sl.usermodel.SimpleShape;
import org.apache.poi.sl.usermodel.StrokeStyle;
import org.apache.poi.sl.usermodel.StrokeStyle.LineCap;
import org.apache.poi.sl.usermodel.StrokeStyle.LineCompound;
import org.apache.poi.sl.usermodel.StrokeStyle.LineDash;
import org.apache.poi.util.Beta;
import org.apache.poi.util.Units;
import org.apache.poi.xslf.draw.geom.XSLFCustomGeometry;
import org.apache.poi.xslf.model.PropertyFetcher;
import org.apache.poi.xslf.usermodel.XSLFPropertiesDelegate.XSLFEffectProperties;
import org.apache.poi.xslf.usermodel.XSLFPropertiesDelegate.XSLFFillProperties;
import org.apache.poi.xslf.usermodel.XSLFPropertiesDelegate.XSLFGeometryProperties;
import org.apache.xmlbeans.XmlObject;
import org.openxmlformats.schemas.drawingml.x2006.main.*;
/**
* Represents a single (non-group) shape in a .pptx slide show
*/
@Beta
public abstract class XSLFSimpleShape extends XSLFShape
implements SimpleShape<XSLFShape,XSLFTextParagraph> {
private static final CTOuterShadowEffect NO_SHADOW = CTOuterShadowEffect.Factory.newInstance();
private static final Logger LOG = PoiLogManager.getLogger(XSLFSimpleShape.class);
    /**
     * Creates a simple shape backed by the given XML bean.
     * Package-private: instances are created by the sheet/factory code, not by users.
     */
    /* package */XSLFSimpleShape(XmlObject shape, XSLFSheet sheet) {
        super(shape,sheet);
    }
@Override
public void setShapeType(ShapeType type) {
XSLFGeometryProperties gp = XSLFPropertiesDelegate.getGeometryDelegate(getShapeProperties());
if (gp == null) {
return;
}
if (gp.isSetCustGeom()) {
gp.unsetCustGeom();
}
CTPresetGeometry2D prst = (gp.isSetPrstGeom()) ? gp.getPrstGeom() : gp.addNewPrstGeom();
prst.setPrst(STShapeType.Enum.forInt(type.ooxmlId));
}
@Override
public ShapeType getShapeType(){
XSLFGeometryProperties gp = XSLFPropertiesDelegate.getGeometryDelegate(getShapeProperties());
if (gp != null && gp.isSetPrstGeom()) {
STShapeType.Enum geom = gp.getPrstGeom().getPrst();
if (geom != null) {
return ShapeType.forId(geom.intValue(), true);
}
}
return null;
}
protected CTTransform2D getXfrm(boolean create) {
PropertyFetcher<CTTransform2D> fetcher = new PropertyFetcher<CTTransform2D>() {
@Override
public boolean fetch(XSLFShape shape) {
XmlObject xo = shape.getShapeProperties();
if (xo instanceof CTShapeProperties && ((CTShapeProperties)xo).isSetXfrm()) {
setValue(((CTShapeProperties)xo).getXfrm());
return true;
}
return false;
}
};
fetchShapeProperty(fetcher);
CTTransform2D xfrm = fetcher.getValue();
if (!create || xfrm != null) {
return xfrm;
} else {
XmlObject xo = getShapeProperties();
if (xo instanceof CTShapeProperties) {
return ((CTShapeProperties)xo).addNewXfrm();
} else {
// ... group shapes have their own getXfrm()
LOG.atWarn().log("{} doesn't have xfrm element.", getClass());
return null;
}
}
}
@Override
public Rectangle2D getAnchor() {
CTTransform2D xfrm = getXfrm(false);
if (xfrm == null || !xfrm.isSetOff()) {
return null;
}
CTPoint2D off = xfrm.getOff();
double x = Units.toPoints(POIXMLUnits.parseLength(off.xgetX()));
double y = Units.toPoints(POIXMLUnits.parseLength(off.xgetY()));
CTPositiveSize2D ext = xfrm.getExt();
double cx = Units.toPoints(ext.getCx());
double cy = Units.toPoints(ext.getCy());
return new Rectangle2D.Double(x, y, cx, cy);
}
@Override
public void setAnchor(Rectangle2D anchor) {
CTTransform2D xfrm = getXfrm(true);
if (xfrm == null) {
return;
}
CTPoint2D off = xfrm.isSetOff() ? xfrm.getOff() : xfrm.addNewOff();
long x = Units.toEMU(anchor.getX());
long y = Units.toEMU(anchor.getY());
off.setX(x);
off.setY(y);
CTPositiveSize2D ext = xfrm.isSetExt() ? xfrm.getExt() : xfrm
.addNewExt();
long cx = Units.toEMU(anchor.getWidth());
long cy = Units.toEMU(anchor.getHeight());
ext.setCx(cx);
ext.setCy(cy);
}
@Override
public void setRotation(double theta) {
CTTransform2D xfrm = getXfrm(true);
if (xfrm != null) {
xfrm.setRot((int) (theta * 60000));
}
}
@Override
public double getRotation() {
CTTransform2D xfrm = getXfrm(false);
return (xfrm == null || !xfrm.isSetRot()) ? 0 : (xfrm.getRot() / 60000.d);
}
@Override
public void setFlipHorizontal(boolean flip) {
CTTransform2D xfrm = getXfrm(true);
if (xfrm != null) {
xfrm.setFlipH(flip);
}
}
@Override
public void setFlipVertical(boolean flip) {
CTTransform2D xfrm = getXfrm(true);
if (xfrm != null) {
xfrm.setFlipV(flip);
}
}
@Override
public boolean getFlipHorizontal() {
CTTransform2D xfrm = getXfrm(false);
return (xfrm != null && xfrm.isSetFlipH()) && xfrm.getFlipH();
}
@Override
public boolean getFlipVertical() {
CTTransform2D xfrm = getXfrm(false);
return (xfrm != null && xfrm.isSetFlipV()) && xfrm.getFlipV();
}
/**
* Get default line properties defined in the theme (if any).
* Used internally to resolve shape properties.
*
* @return line properties from the theme of null
*/
private CTLineProperties getDefaultLineProperties() {
CTShapeStyle style = getSpStyle();
if (style == null) {
return null;
}
CTStyleMatrixReference lnRef = style.getLnRef();
if (lnRef == null) {
return null;
}
// 1-based index of a line style within the style matrix
int idx = Math.toIntExact(lnRef.getIdx());
XSLFTheme theme = getSheet().getTheme();
if (theme == null) {
return null;
}
CTBaseStyles styles = theme.getXmlObject().getThemeElements();
if (styles == null) {
return null;
}
CTStyleMatrix styleMatrix = styles.getFmtScheme();
if (styleMatrix == null) {
return null;
}
CTLineStyleList lineStyles = styleMatrix.getLnStyleLst();
if (lineStyles == null || lineStyles.sizeOfLnArray() < idx) {
return null;
}
return lineStyles.getLnArray(idx - 1);
}
/**
* @param color the color to paint the shape outline.
* A {@code null} value turns off the shape outline.
*/
public void setLineColor(Color color) {
CTLineProperties ln = getLn(this, true);
if (ln == null) {
return;
}
if (ln.isSetSolidFill()) {
ln.unsetSolidFill();
}
if (ln.isSetGradFill()) {
ln.unsetGradFill();
}
if (ln.isSetPattFill()) {
ln.unsetPattFill();
}
if (ln.isSetNoFill()) {
ln.unsetNoFill();
}
if (color == null) {
ln.addNewNoFill();
} else {
CTSolidColorFillProperties fill = ln.addNewSolidFill();
XSLFColor col = new XSLFColor(fill, getSheet().getTheme(), fill.getSchemeClr(), getSheet());
col.setColor(color);
}
}
/**
*
* @return the color of the shape outline or {@code null}
* if outline is turned off
*/
@SuppressWarnings("WeakerAccess")
public Color getLineColor() {
PaintStyle ps = getLinePaint();
if (ps instanceof SolidPaint) {
return ((SolidPaint)ps).getSolidColor().getColor();
}
return null;
}
    /**
     * Resolves the paint used to stroke this shape's outline.
     * <p>
     * Resolution order: an explicit line fill on the shape (walking the
     * placeholder inheritance chain via {@code fetchShapeProperty}), then the
     * fill carried by the shape's style reference ({@code lnRef}), then the
     * themed line style that reference points at.
     *
     * @return the line paint, or {@code null} when the outline is explicitly
     *         disabled or no paint could be resolved
     */
    @SuppressWarnings("WeakerAccess")
    protected PaintStyle getLinePaint() {
        XSLFSheet sheet = getSheet();
        final XSLFTheme theme = sheet.getTheme();
        final boolean hasPlaceholder = getPlaceholder() != null;
        PropertyFetcher<PaintStyle> fetcher = new PropertyFetcher<PaintStyle>() {
            @Override
            public boolean fetch(XSLFShape shape) {
                CTLineProperties spPr = getLn(shape, false);
                XSLFFillProperties fp = XSLFPropertiesDelegate.getFillDelegate(spPr);
                // an explicit noFill means "no outline": succeed with a null paint
                // so the fallback chain stops here
                if (fp != null && fp.isSetNoFill()) {
                    setValue(null);
                    return true;
                }
                PackagePart pp = shape.getSheet().getPackagePart();
                PaintStyle paint = selectPaint(fp, null, pp, theme, hasPlaceholder);
                if (paint != null) {
                    setValue(paint);
                    return true;
                }
                // no direct line fill: consult the shape's style reference
                CTShapeStyle style = shape.getSpStyle();
                if (style != null) {
                    fp = XSLFPropertiesDelegate.getFillDelegate(style.getLnRef());
                    paint = selectPaint(fp, null, pp, theme, hasPlaceholder);
                    // line color was not found, check if it is defined in the theme
                    if (paint == null) {
                        paint = getThemePaint(style, pp);
                    }
                }
                if (paint != null) {
                    setValue(paint);
                    return true;
                }
                return false;
            }
            // Looks up the themed line style referenced by lnRef/@idx (1-based).
            PaintStyle getThemePaint(CTShapeStyle style, PackagePart pp) {
                // get a reference to a line style within the style matrix.
                CTStyleMatrixReference lnRef = style.getLnRef();
                if (lnRef == null) {
                    return null;
                }
                int idx = Math.toIntExact(lnRef.getIdx());
                CTSchemeColor phClr = lnRef.getSchemeClr();
                // idx == 0 means "no style reference"
                if (idx <= 0) {
                    return null;
                }
                CTLineProperties props = theme.getXmlObject().getThemeElements().getFmtScheme().getLnStyleLst().getLnArray(idx - 1);
                XSLFFillProperties fp = XSLFPropertiesDelegate.getFillDelegate(props);
                return selectPaint(fp, phClr, pp, theme, hasPlaceholder);
            }
        };
        fetchShapeProperty(fetcher);
        return fetcher.getValue();
    }
/**
*
* @param width line width in points. {@code 0} means no line
*/
@SuppressWarnings("WeakerAccess")
public void setLineWidth(double width) {
CTLineProperties lnPr = getLn(this, true);
if (lnPr == null) {
return;
}
if (width == 0.) {
if (lnPr.isSetW()) {
lnPr.unsetW();
}
if (!lnPr.isSetNoFill()) {
lnPr.addNewNoFill();
}
if (lnPr.isSetSolidFill()) {
lnPr.unsetSolidFill();
}
if (lnPr.isSetGradFill()) {
lnPr.unsetGradFill();
}
if (lnPr.isSetPattFill()) {
lnPr.unsetPattFill();
}
} else {
if (lnPr.isSetNoFill()) {
lnPr.unsetNoFill();
}
lnPr.setW(Units.toEMU(width));
}
}
/**
* @return line width in points. {@code 0} means no line.
*/
@SuppressWarnings("WeakerAccess")
public double getLineWidth() {
PropertyFetcher<Double> fetcher = new PropertyFetcher<Double>() {
@Override
public boolean fetch(XSLFShape shape) {
CTLineProperties ln = getLn(shape, false);
if (ln != null) {
if (ln.isSetNoFill()) {
setValue(0.);
return true;
}
if (ln.isSetW()) {
setValue(Units.toPoints(ln.getW()));
return true;
}
}
return false;
}
};
fetchShapeProperty(fetcher);
double lineWidth = 0;
if (fetcher.getValue() == null) {
CTLineProperties defaultLn = getDefaultLineProperties();
if (defaultLn != null) {
if (defaultLn.isSetW()) {
lineWidth = Units.toPoints(defaultLn.getW());
}
}
} else {
lineWidth = fetcher.getValue();
}
return lineWidth;
}
/**
* @param compound set the line compound style
*/
@SuppressWarnings("WeakerAccess")
public void setLineCompound(LineCompound compound) {
CTLineProperties ln = getLn(this, true);
if (ln == null) {
return;
}
if (compound == null) {
if (ln.isSetCmpd()) {
ln.unsetCmpd();
}
} else {
STCompoundLine.Enum xCmpd;
switch (compound) {
default:
case SINGLE:
xCmpd = STCompoundLine.SNG;
break;
case DOUBLE:
xCmpd = STCompoundLine.DBL;
break;
case THICK_THIN:
xCmpd = STCompoundLine.THICK_THIN;
break;
case THIN_THICK:
xCmpd = STCompoundLine.THIN_THICK;
break;
case TRIPLE:
xCmpd = STCompoundLine.TRI;
break;
}
ln.setCmpd(xCmpd);
}
}
/**
* @return the line compound
*/
@SuppressWarnings("WeakerAccess")
public LineCompound getLineCompound() {
PropertyFetcher<Integer> fetcher = new PropertyFetcher<Integer>() {
@Override
public boolean fetch(XSLFShape shape) {
CTLineProperties ln = getLn(shape, false);
if (ln != null) {
STCompoundLine.Enum stCmpd = ln.getCmpd();
if (stCmpd != null) {
setValue(stCmpd.intValue());
return true;
}
}
return false;
}
};
fetchShapeProperty(fetcher);
Integer cmpd = fetcher.getValue();
if (cmpd == null) {
CTLineProperties defaultLn = getDefaultLineProperties();
if (defaultLn != null && defaultLn.isSetCmpd()) {
switch (defaultLn.getCmpd().intValue()) {
default:
case STCompoundLine.INT_SNG:
return LineCompound.SINGLE;
case STCompoundLine.INT_DBL:
return LineCompound.DOUBLE;
case STCompoundLine.INT_THICK_THIN:
return LineCompound.THICK_THIN;
case STCompoundLine.INT_THIN_THICK:
return LineCompound.THIN_THICK;
case STCompoundLine.INT_TRI:
return LineCompound.TRIPLE;
}
}
}
return null;
}
/**
*
* @param dash a preset line dashing scheme to stroke thr shape outline
*/
@SuppressWarnings("WeakerAccess")
public void setLineDash(LineDash dash) {
CTLineProperties ln = getLn(this, true);
if (ln == null) {
return;
}
if (dash == null) {
if (ln.isSetPrstDash()) {
ln.unsetPrstDash();
}
} else {
CTPresetLineDashProperties ldp = ln.isSetPrstDash() ? ln.getPrstDash() : ln.addNewPrstDash();
ldp.setVal(STPresetLineDashVal.Enum.forInt(dash.ooxmlId));
}
}
/**
* @return a preset line dashing scheme to stroke the shape outline
*/
@SuppressWarnings("WeakerAccess")
public LineDash getLineDash() {
PropertyFetcher<LineDash> fetcher = new PropertyFetcher<LineDash>() {
@Override
public boolean fetch(XSLFShape shape) {
CTLineProperties ln = getLn(shape, false);
if (ln == null || !ln.isSetPrstDash()) {
return false;
}
setValue(LineDash.fromOoxmlId(ln.getPrstDash().getVal().intValue()));
return true;
}
};
fetchShapeProperty(fetcher);
LineDash dash = fetcher.getValue();
if (dash == null) {
CTLineProperties defaultLn = getDefaultLineProperties();
if (defaultLn != null && defaultLn.isSetPrstDash()) {
dash = LineDash.fromOoxmlId(defaultLn.getPrstDash().getVal().intValue());
}
}
return dash;
}
/**
*
* @param cap the line end cap style
*/
@SuppressWarnings("WeakerAccess")
public void setLineCap(LineCap cap) {
CTLineProperties ln = getLn(this, true);
if (ln == null) {
return;
}
if (cap == null) {
if (ln.isSetCap()) {
ln.unsetCap();
}
} else {
ln.setCap(STLineCap.Enum.forInt(cap.ooxmlId));
}
}
/**
*
* @return the line end cap style
*/
@SuppressWarnings("WeakerAccess")
public LineCap getLineCap() {
PropertyFetcher<LineCap> fetcher = new PropertyFetcher<LineCap>() {
@Override
public boolean fetch(XSLFShape shape) {
CTLineProperties ln = getLn(shape, false);
if (ln != null && ln.isSetCap()) {
setValue(LineCap.fromOoxmlId(ln.getCap().intValue()));
return true;
}
return false;
}
};
fetchShapeProperty(fetcher);
LineCap cap = fetcher.getValue();
if (cap == null) {
CTLineProperties defaultLn = getDefaultLineProperties();
if (defaultLn != null && defaultLn.isSetCap()) {
cap = LineCap.fromOoxmlId(defaultLn.getCap().intValue());
}
}
return cap;
}
@Override
public void setFillColor(Color color) {
XSLFFillProperties fp = XSLFPropertiesDelegate.getFillDelegate(getShapeProperties());
if (fp == null) {
return;
}
if (color == null) {
if (fp.isSetSolidFill()) {
fp.unsetSolidFill();
}
if (fp.isSetGradFill()) {
fp.unsetGradFill();
}
if (fp.isSetPattFill()) {
fp.unsetGradFill();
}
if (fp.isSetBlipFill()) {
fp.unsetBlipFill();
}
if (!fp.isSetNoFill()) {
fp.addNewNoFill();
}
} else {
if (fp.isSetNoFill()) {
fp.unsetNoFill();
}
CTSolidColorFillProperties fill = fp.isSetSolidFill() ? fp.getSolidFill() : fp.addNewSolidFill();
XSLFColor col = new XSLFColor(fill, getSheet().getTheme(), fill.getSchemeClr(), getSheet());
col.setColor(color);
}
}
@Override
public Color getFillColor() {
PaintStyle ps = getFillPaint();
if (ps instanceof SolidPaint) {
return DrawPaint.applyColorTransform(((SolidPaint)ps).getSolidColor());
}
return null;
}
    /**
     * @return shadow of this shape or null if shadow is disabled
     */
    @Override
    public XSLFShadow getShadow() {
        // NO_SHADOW is a sentinel: an effect list that exists but carries no
        // outer shadow means "explicitly no shadow", which must stop the
        // placeholder/theme fallback below.
        PropertyFetcher<CTOuterShadowEffect> fetcher = new PropertyFetcher<CTOuterShadowEffect>() {
            @Override
            public boolean fetch(XSLFShape shape) {
                XSLFEffectProperties ep = XSLFPropertiesDelegate.getEffectDelegate(shape.getShapeProperties());
                if (ep != null && ep.isSetEffectLst()) {
                    CTOuterShadowEffect obj = ep.getEffectLst().getOuterShdw();
                    setValue(obj == null ? NO_SHADOW : obj);
                    return true;
                }
                return false;
            }
        };
        fetchShapeProperty(fetcher);
        CTOuterShadowEffect obj = fetcher.getValue();
        if (obj == null) {
            // fill color was not found, check if it is defined in the theme
            CTShapeStyle style = getSpStyle();
            if (style != null && style.getEffectRef() != null) {
                // 1-based index of a shadow style within the style matrix
                int idx = (int) style.getEffectRef().getIdx();
                // idx == 0 means "no effect reference"
                if (idx != 0) {
                    CTStyleMatrix styleMatrix = getSheet().getTheme().getXmlObject().getThemeElements().getFmtScheme();
                    CTEffectStyleItem ef = styleMatrix.getEffectStyleLst().getEffectStyleArray(idx - 1);
                    obj = ef.getEffectLst().getOuterShdw();
                }
            }
        }
        // unwrap the sentinel: both "nothing found" and "explicitly disabled" yield null
        return (obj == null || obj == NO_SHADOW) ? null : new XSLFShadow(obj, this);
    }
/**
* @return definition of the shape geometry
*/
@Override
public CustomGeometry getGeometry() {
XSLFGeometryProperties gp = XSLFPropertiesDelegate.getGeometryDelegate(getShapeProperties());
if (gp == null) {
return null;
}
CustomGeometry geom;
PresetGeometries dict = PresetGeometries.getInstance();
if(gp.isSetPrstGeom()){
String name = gp.getPrstGeom().getPrst().toString();
geom = dict.get(name);
if(geom == null) {
throw new IllegalStateException("Unknown shape geometry: " + name + ", available geometries are: " + dict.keySet());
}
} else if (gp.isSetCustGeom()){
geom = XSLFCustomGeometry.convertCustomGeometry(gp.getCustGeom());
} else {
geom = dict.get("rect");
}
return geom;
}
@Override
void copy(XSLFShape sh){
super.copy(sh);
XSLFSimpleShape s = (XSLFSimpleShape)sh;
Color srsSolidFill = s.getFillColor();
Color tgtSoliFill = getFillColor();
if(srsSolidFill != null && !srsSolidFill.equals(tgtSoliFill)){
setFillColor(srsSolidFill);
}
XSLFFillProperties fp = XSLFPropertiesDelegate.getFillDelegate(getShapeProperties());
if(fp != null && fp.isSetBlipFill()){
CTBlip blip = fp.getBlipFill().getBlip();
String blipId = blip.getEmbed();
String relId = getSheet().importBlip(blipId, s.getSheet());
blip.setEmbed(relId);
}
Color srcLineColor = s.getLineColor();
Color tgtLineColor = getLineColor();
if(srcLineColor != null && !srcLineColor.equals(tgtLineColor)) {
setLineColor(srcLineColor);
}
double srcLineWidth = s.getLineWidth();
double tgtLineWidth = getLineWidth();
if(srcLineWidth != tgtLineWidth) {
setLineWidth(srcLineWidth);
}
LineDash srcLineDash = s.getLineDash();
LineDash tgtLineDash = getLineDash();
if(srcLineDash != null && srcLineDash != tgtLineDash) {
setLineDash(srcLineDash);
}
LineCap srcLineCap = s.getLineCap();
LineCap tgtLineCap = getLineCap();
if(srcLineCap != null && srcLineCap != tgtLineCap) {
setLineCap(srcLineCap);
}
}
/**
* Specifies the line end decoration, such as a triangle or arrowhead.
*
* @param style the line end decoration style
*/
@SuppressWarnings("WeakerAccess")
public void setLineHeadDecoration(DecorationShape style) {
CTLineProperties ln = getLn(this, true);
if (ln == null) {
return;
}
CTLineEndProperties lnEnd = ln.isSetHeadEnd() ? ln.getHeadEnd() : ln.addNewHeadEnd();
if (style == null) {
if (lnEnd.isSetType()) {
lnEnd.unsetType();
}
} else {
lnEnd.setType(STLineEndType.Enum.forInt(style.ooxmlId));
}
}
/**
* @return the line end decoration shape
*/
@SuppressWarnings("WeakerAccess")
public DecorationShape getLineHeadDecoration() {
CTLineProperties ln = getLn(this, false);
DecorationShape ds = DecorationShape.NONE;
if (ln != null && ln.isSetHeadEnd() && ln.getHeadEnd().isSetType()) {
ds = DecorationShape.fromOoxmlId(ln.getHeadEnd().getType().intValue());
}
return ds;
}
/**
* specifies decoration width of the head of a line.
*
* @param style the decoration width
*/
@SuppressWarnings("WeakerAccess")
public void setLineHeadWidth(DecorationSize style) {
CTLineProperties ln = getLn(this, true);
if (ln == null) {
return;
}
CTLineEndProperties lnEnd = ln.isSetHeadEnd() ? ln.getHeadEnd() : ln.addNewHeadEnd();
if (style == null) {
if (lnEnd.isSetW()) {
lnEnd.unsetW();
}
} else {
lnEnd.setW(STLineEndWidth.Enum.forInt(style.ooxmlId));
}
}
/**
* @return the line end decoration width
*/
@SuppressWarnings("WeakerAccess")
public DecorationSize getLineHeadWidth() {
CTLineProperties ln = getLn(this, false);
DecorationSize ds = DecorationSize.MEDIUM;
if (ln != null && ln.isSetHeadEnd() && ln.getHeadEnd().isSetW()) {
ds = DecorationSize.fromOoxmlId(ln.getHeadEnd().getW().intValue());
}
return ds;
}
/**
* Specifies the line end width in relation to the line width.
*/
@SuppressWarnings("WeakerAccess")
public void setLineHeadLength(DecorationSize style) {
CTLineProperties ln = getLn(this, true);
if (ln == null) {
return;
}
CTLineEndProperties lnEnd = ln.isSetHeadEnd() ? ln.getHeadEnd() : ln.addNewHeadEnd();
if (style == null) {
if (lnEnd.isSetLen()) {
lnEnd.unsetLen();
}
} else {
lnEnd.setLen(STLineEndLength.Enum.forInt(style.ooxmlId));
}
}
/**
* @return the line end decoration length
*/
@SuppressWarnings("WeakerAccess")
public DecorationSize getLineHeadLength() {
CTLineProperties ln = getLn(this, false);
DecorationSize ds = DecorationSize.MEDIUM;
if (ln != null && ln.isSetHeadEnd() && ln.getHeadEnd().isSetLen()) {
ds = DecorationSize.fromOoxmlId(ln.getHeadEnd().getLen().intValue());
}
return ds;
}
/**
* Specifies the line end decoration, such as a triangle or arrowhead.
*/
@SuppressWarnings("WeakerAccess")
public void setLineTailDecoration(DecorationShape style) {
CTLineProperties ln = getLn(this, true);
if (ln == null) {
return;
}
CTLineEndProperties lnEnd = ln.isSetTailEnd() ? ln.getTailEnd() : ln.addNewTailEnd();
if (style == null) {
if (lnEnd.isSetType()) {
lnEnd.unsetType();
}
} else {
lnEnd.setType(STLineEndType.Enum.forInt(style.ooxmlId));
}
}
/**
* @return the line end decoration shape
*/
@SuppressWarnings("WeakerAccess")
public DecorationShape getLineTailDecoration() {
CTLineProperties ln = getLn(this, false);
DecorationShape ds = DecorationShape.NONE;
if (ln != null && ln.isSetTailEnd() && ln.getTailEnd().isSetType()) {
ds = DecorationShape.fromOoxmlId(ln.getTailEnd().getType().intValue());
}
return ds;
}
/**
* specifies decorations which can be added to the tail of a line.
*/
@SuppressWarnings("WeakerAccess")
public void setLineTailWidth(DecorationSize style) {
CTLineProperties ln = getLn(this, true);
if (ln == null) {
return;
}
CTLineEndProperties lnEnd = ln.isSetTailEnd() ? ln.getTailEnd() : ln.addNewTailEnd();
if (style == null) {
if (lnEnd.isSetW()) {
lnEnd.unsetW();
}
} else {
lnEnd.setW(STLineEndWidth.Enum.forInt(style.ooxmlId));
}
}
/**
* @return the line end decoration width
*/
@SuppressWarnings("WeakerAccess")
public DecorationSize getLineTailWidth() {
CTLineProperties ln = getLn(this, false);
DecorationSize ds = DecorationSize.MEDIUM;
if (ln != null && ln.isSetTailEnd() && ln.getTailEnd().isSetW()) {
ds = DecorationSize.fromOoxmlId(ln.getTailEnd().getW().intValue());
}
return ds;
}
/**
* Specifies the line end width in relation to the line width.
*/
@SuppressWarnings("WeakerAccess")
public void setLineTailLength(DecorationSize style) {
CTLineProperties ln = getLn(this, true);
if (ln == null) {
return;
}
CTLineEndProperties lnEnd = ln.isSetTailEnd() ? ln.getTailEnd() : ln.addNewTailEnd();
if (style == null) {
if (lnEnd.isSetLen()) {
lnEnd.unsetLen();
}
} else {
lnEnd.setLen(STLineEndLength.Enum.forInt(style.ooxmlId));
}
}
/**
* @return the line end decoration length
*/
@SuppressWarnings("WeakerAccess")
public DecorationSize getLineTailLength() {
CTLineProperties ln = getLn(this, false);
DecorationSize ds = DecorationSize.MEDIUM;
if (ln != null && ln.isSetTailEnd() && ln.getTailEnd().isSetLen()) {
ds = DecorationSize.fromOoxmlId(ln.getTailEnd().getLen().intValue());
}
return ds;
}
@Override
public Guide getAdjustValue(String name) {
XSLFGeometryProperties gp = XSLFPropertiesDelegate.getGeometryDelegate(getShapeProperties());
if (gp != null && gp.isSetPrstGeom() && gp.getPrstGeom().isSetAvLst()) {
for (CTGeomGuide g : gp.getPrstGeom().getAvLst().getGdArray()) {
if (g.getName().equals(name)) {
Guide gd = new Guide();
gd.setName(g.getName());
gd.setFmla(g.getFmla());
return gd;
}
}
}
return null;
}
    /**
     * @return a live view of this shape's head/tail outline decorations; each
     *         accessor delegates to the corresponding getLineHeadXxx /
     *         getLineTailXxx method on this shape at call time
     */
    @Override
    public LineDecoration getLineDecoration() {
        return new LineDecoration() {
            @Override
            public DecorationShape getHeadShape() {
                return getLineHeadDecoration();
            }
            @Override
            public DecorationSize getHeadWidth() {
                return getLineHeadWidth();
            }
            @Override
            public DecorationSize getHeadLength() {
                return getLineHeadLength();
            }
            @Override
            public DecorationShape getTailShape() {
                return getLineTailDecoration();
            }
            @Override
            public DecorationSize getTailWidth() {
                return getLineTailWidth();
            }
            @Override
            public DecorationSize getTailLength() {
                return getLineTailLength();
            }
        };
    }
/**
* fetch shape fill as a java.awt.Paint
*
* @return either Color or GradientPaint or TexturePaint or null
*/
@Override
public FillStyle getFillStyle() {
return XSLFSimpleShape.this::getFillPaint;
}
    /**
     * @return a live view of this shape's stroke attributes; each accessor
     *         delegates to the corresponding getter on this shape at call time
     */
    @Override
    public StrokeStyle getStrokeStyle() {
        return new StrokeStyle() {
            @Override
            public PaintStyle getPaint() {
                return XSLFSimpleShape.this.getLinePaint();
            }
            @Override
            public LineCap getLineCap() {
                return XSLFSimpleShape.this.getLineCap();
            }
            @Override
            public LineDash getLineDash() {
                return XSLFSimpleShape.this.getLineDash();
            }
            @Override
            public double getLineWidth() {
                return XSLFSimpleShape.this.getLineWidth();
            }
            @Override
            public LineCompound getLineCompound() {
                return XSLFSimpleShape.this.getLineCompound();
            }
        };
    }
@Override
public void setStrokeStyle(Object... styles) {
if (styles.length == 0) {
// remove stroke
setLineColor(null);
return;
}
// TODO: handle PaintStyle
for (Object st : styles) {
if (st instanceof Number) {
setLineWidth(((Number)st).doubleValue());
} else if (st instanceof LineCap) {
setLineCap((LineCap)st);
} else if (st instanceof LineDash) {
setLineDash((LineDash)st);
} else if (st instanceof LineCompound) {
setLineCompound((LineCompound)st);
} else if (st instanceof Color) {
setLineColor((Color)st);
}
}
}
@Override
public XSLFHyperlink getHyperlink() {
CTNonVisualDrawingProps cNvPr = getCNvPr();
if (!cNvPr.isSetHlinkClick()) {
return null;
}
return new XSLFHyperlink(cNvPr.getHlinkClick(), getSheet());
}
@Override
public XSLFHyperlink createHyperlink() {
XSLFHyperlink hl = getHyperlink();
if (hl == null) {
CTNonVisualDrawingProps cNvPr = getCNvPr();
hl = new XSLFHyperlink(cNvPr.addNewHlinkClick(), getSheet());
}
return hl;
}
private static CTLineProperties getLn(XSLFShape shape, boolean create) {
XmlObject pr = shape.getShapeProperties();
if (!(pr instanceof CTShapeProperties)) {
LOG.atWarn().log("{} doesn't have line properties", shape.getClass());
return null;
}
CTShapeProperties spr = (CTShapeProperties)pr;
return (spr.isSetLn() || !create) ? spr.getLn() : spr.addNewLn();
}
}
|
googleapis/google-api-java-client-services | 36,248 | clients/google-api-services-workstations/v1/2.0.0/com/google/api/services/workstations/v1/model/WorkstationConfig.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.workstations.v1.model;
/**
* A workstation configuration resource in the Cloud Workstations API. Workstation configurations
* act as templates for workstations. The workstation configuration defines details such as the
* workstation virtual machine (VM) instance type, persistent storage, container image defining
* environment, which IDE or Code Editor to use, and more. Administrators and platform teams can
* also use [Identity and Access Management (IAM)](https://cloud.google.com/iam/docs/overview) rules
* to grant access to teams or to individual developers.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Workstations API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class WorkstationConfig extends com.google.api.client.json.GenericJson {
  // NOTE(review): generated model fields -- do not hand-edit; regenerate from the
  // Discovery document instead. Accessors below store/return references as-is.
  /**
   * Optional. A list of PortRanges specifying single ports or ranges of ports that are externally
   * accessible in the workstation. Allowed ports must be one of 22, 80, or within range 1024-65535.
   * If not specified defaults to ports 22, 80, and ports 1024-65535.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<PortRange> allowedPorts;

  static {
    // hack to force ProGuard to consider PortRange used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(PortRange.class);
  }

  /**
   * Optional. Client-specified annotations.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, java.lang.String> annotations;

  /**
   * Output only. Status conditions describing the workstation configuration's current state.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<Status> conditions;

  static {
    // hack to force ProGuard to consider Status used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(Status.class);
  }

  /**
   * Optional. Container that runs upon startup for each workstation using this workstation
   * configuration.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Container container;

  /**
   * Output only. Time when this workstation configuration was created.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String createTime;

  /**
   * Output only. Whether this workstation configuration is in degraded mode, in which case it may
   * require user action to restore full functionality. The conditions field contains detailed
   * information about the status of the configuration.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean degraded;

  /**
   * Output only. Time when this workstation configuration was soft-deleted.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String deleteTime;

  /**
   * Optional. Disables support for plain TCP connections in the workstation. By default the service
   * supports TCP connections through a websocket relay. Setting this option to true disables that
   * relay, which prevents the usage of services that require plain TCP connections, such as SSH.
   * When enabled, all communication must occur over HTTPS or WSS.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean disableTcpConnections;

  /**
   * Optional. Human-readable name for this workstation configuration.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String displayName;

  /**
   * Optional. Whether to enable Linux `auditd` logging on the workstation. When enabled, a
   * service_account must also be specified that has `roles/logging.logWriter` and
   * `roles/monitoring.metricWriter` on the project. Operating system audit logging is distinct from
   * [Cloud Audit Logs](https://cloud.google.com/workstations/docs/audit-logging) and [Container
   * output logging](https://cloud.google.com/workstations/docs/container-output-logging#overview).
   * Operating system audit logs are available in the [Cloud
   * Logging](https://cloud.google.com/logging/docs) console by querying:
   * resource.type="gce_instance" log_name:"/logs/linux-auditd"
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean enableAuditAgent;

  /**
   * Immutable. Encrypts resources of this workstation configuration using a customer-managed
   * encryption key (CMEK). If specified, the boot disk of the Compute Engine instance and the
   * persistent disk are encrypted using this encryption key. If this field is not set, the disks
   * are encrypted using a generated key. Customer-managed encryption keys do not protect disk
   * metadata. If the customer-managed encryption key is rotated, when the workstation instance is
   * stopped, the system attempts to recreate the persistent disk with the new version of the key.
   * Be sure to keep older versions of the key until the persistent disk is recreated. Otherwise,
   * data on the persistent disk might be lost. If the encryption key is revoked, the workstation
   * session automatically stops within 7 hours. Immutable after the workstation configuration is
   * created.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private CustomerEncryptionKey encryptionKey;

  /**
   * Optional. Ephemeral directories which won't persist across workstation sessions.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<EphemeralDirectory> ephemeralDirectories;

  static {
    // hack to force ProGuard to consider EphemeralDirectory used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(EphemeralDirectory.class);
  }

  /**
   * Optional. Checksum computed by the server. May be sent on update and delete requests to make
   * sure that the client has an up-to-date value before proceeding.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String etag;

  /**
   * Optional. Grant creator of a workstation `roles/workstations.policyAdmin` role along with
   * `roles/workstations.user` role on the workstation created by them. This allows workstation
   * users to share access to either their entire workstation, or individual ports. Defaults to
   * false.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean grantWorkstationAdminRoleOnCreate;

  /**
   * Optional. Runtime host for the workstation.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Host host;

  /**
   * Optional. Number of seconds to wait before automatically stopping a workstation after it last
   * received user traffic. A value of `"0s"` indicates that Cloud Workstations VMs created with
   * this configuration should never time out due to idleness. Provide
   * [duration](https://developers.google.com/protocol-
   * buffers/docs/reference/google.protobuf#duration) terminated by `s` for seconds—for example,
   * `"7200s"` (2 hours). The default is `"1200s"` (20 minutes).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String idleTimeout;

  /**
   * Optional. [Labels](https://cloud.google.com/workstations/docs/label-resources) that are applied
   * to the workstation configuration and that are also propagated to the underlying Compute Engine
   * resources.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, java.lang.String> labels;

  /**
   * Optional. Maximum number of workstations under this configuration a user can have
   * `workstations.workstation.use` permission on. Only enforced on CreateWorkstation API calls on
   * the user issuing the API request. Can be overridden by: - granting a user
   * workstations.workstationConfigs.exemptMaxUsableWorkstationLimit permission, or - having a user
   * with that permission create a workstation and granting another user
   * `workstations.workstation.use` permission on that workstation. If not specified, defaults to
   * `0`, which indicates unlimited.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Integer maxUsableWorkstations;

  /**
   * Identifier. Full name of this workstation configuration.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * Optional. Directories to persist across workstation sessions.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<PersistentDirectory> persistentDirectories;

  static {
    // hack to force ProGuard to consider PersistentDirectory used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(PersistentDirectory.class);
  }

  /**
   * Optional. Readiness checks to perform when starting a workstation using this workstation
   * configuration. Mark a workstation as running only after all specified readiness checks return
   * 200 status codes.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<ReadinessCheck> readinessChecks;

  static {
    // hack to force ProGuard to consider ReadinessCheck used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(ReadinessCheck.class);
  }

  /**
   * Output only. Indicates whether this workstation configuration is currently being updated to
   * match its intended state.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean reconciling;

  /**
   * Optional. Immutable. Specifies the zones used to replicate the VM and disk resources within the
   * region. If set, exactly two zones within the workstation cluster's region must be specified—for
   * example, `['us-central1-a', 'us-central1-f']`. If this field is empty, two default zones within
   * the region are used. Immutable after the workstation configuration is created.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> replicaZones;

  /**
   * Optional. Number of seconds that a workstation can run until it is automatically shut down. We
   * recommend that workstations be shut down daily to reduce costs and so that security updates can
   * be applied upon restart. The idle_timeout and running_timeout fields are independent of each
   * other. Note that the running_timeout field shuts down VMs after the specified time, regardless
   * of whether or not the VMs are idle. Provide duration terminated by `s` for seconds—for example,
   * `"54000s"` (15 hours). Defaults to `"43200s"` (12 hours). A value of `"0s"` indicates that
   * workstations using this configuration should never time out. If encryption_key is set, it must
   * be greater than `"0s"` and less than `"86400s"` (24 hours). Warning: A value of `"0s"`
   * indicates that Cloud Workstations VMs created with this configuration have no maximum running
   * time. This is strongly discouraged because you incur costs and will not pick up security
   * updates.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String runningTimeout;

  /**
   * Output only. A system-assigned unique identifier for this workstation configuration.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String uid;

  /**
   * Output only. Time when this workstation configuration was most recently updated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String updateTime;
  /**
   * Optional. A list of PortRanges specifying single ports or ranges of ports that are externally
   * accessible in the workstation. Allowed ports must be one of 22, 80, or within range 1024-65535.
   * If not specified defaults to ports 22, 80, and ports 1024-65535.
   * @return value or {@code null} for none
   */
  public java.util.List<PortRange> getAllowedPorts() {
    // generated accessor: returns the stored list as-is (no defensive copy)
    return allowedPorts;
  }
  /**
   * Optional. A list of PortRanges specifying single ports or ranges of ports that are externally
   * accessible in the workstation. Allowed ports must be one of 22, 80, or within range 1024-65535.
   * If not specified defaults to ports 22, 80, and ports 1024-65535.
   * @param allowedPorts allowedPorts or {@code null} for none
   */
  public WorkstationConfig setAllowedPorts(java.util.List<PortRange> allowedPorts) {
    // generated accessor: stores the given list by reference; returns this for chaining
    this.allowedPorts = allowedPorts;
    return this;
  }
/**
 * Optional. Client-specified annotations attached to this configuration.
 *
 * @return the annotations map, or {@code null} for none
 */
public java.util.Map<String, java.lang.String> getAnnotations() {
  return this.annotations;
}
/**
 * Sets the client-specified annotations.
 *
 * @param annotations the annotations map, or {@code null} for none
 */
public WorkstationConfig setAnnotations(java.util.Map<String, java.lang.String> annotations) {
  this.annotations = annotations;
  return this;
}
/**
 * Output only. Status conditions describing this workstation configuration's current
 * state.
 *
 * @return the status conditions, or {@code null} for none
 */
public java.util.List<Status> getConditions() {
  return this.conditions;
}
/**
 * Sets the status conditions (output-only on the service side).
 *
 * @param conditions the status conditions, or {@code null} for none
 */
public WorkstationConfig setConditions(java.util.List<Status> conditions) {
  this.conditions = conditions;
  return this;
}
/**
 * Optional. Container that runs upon startup for each workstation using this
 * configuration.
 *
 * @return the startup container, or {@code null} for none
 */
public Container getContainer() {
  return this.container;
}
/**
 * Sets the container that runs upon startup for each workstation.
 *
 * @param container the startup container, or {@code null} for none
 */
public WorkstationConfig setContainer(Container container) {
  this.container = container;
  return this;
}
/**
 * Output only. Time when this workstation configuration was created.
 *
 * @return the creation time, or {@code null} for none
 */
public String getCreateTime() {
  return this.createTime;
}
/**
 * Sets the creation time (output-only on the service side).
 *
 * @param createTime the creation time, or {@code null} for none
 */
public WorkstationConfig setCreateTime(String createTime) {
  this.createTime = createTime;
  return this;
}
/**
 * Output only. Whether this workstation configuration is in degraded mode, in which
 * case it may require user action to restore full functionality. The conditions field
 * contains detailed information about the status of the configuration.
 *
 * @return the degraded flag, or {@code null} for none
 */
public java.lang.Boolean getDegraded() {
  return this.degraded;
}
/**
 * Sets the degraded flag (output-only on the service side).
 *
 * @param degraded the degraded flag, or {@code null} for none
 */
public WorkstationConfig setDegraded(java.lang.Boolean degraded) {
  this.degraded = degraded;
  return this;
}
/**
 * Output only. Time when this workstation configuration was soft-deleted.
 *
 * @return the soft-deletion time, or {@code null} for none
 */
public String getDeleteTime() {
  return this.deleteTime;
}
/**
 * Sets the soft-deletion time (output-only on the service side).
 *
 * @param deleteTime the soft-deletion time, or {@code null} for none
 */
public WorkstationConfig setDeleteTime(String deleteTime) {
  this.deleteTime = deleteTime;
  return this;
}
/**
 * Optional. Disables support for plain TCP connections in the workstation. By default
 * the service supports TCP connections through a websocket relay. Setting this option
 * to true disables that relay, which prevents the usage of services that require plain
 * TCP connections, such as SSH. When enabled, all communication must occur over HTTPS
 * or WSS.
 *
 * @return the disable-TCP flag, or {@code null} for none
 */
public java.lang.Boolean getDisableTcpConnections() {
  return this.disableTcpConnections;
}
/**
 * Sets whether plain TCP connections are disabled; see
 * {@link #getDisableTcpConnections()}.
 *
 * @param disableTcpConnections the disable-TCP flag, or {@code null} for none
 */
public WorkstationConfig setDisableTcpConnections(java.lang.Boolean disableTcpConnections) {
  this.disableTcpConnections = disableTcpConnections;
  return this;
}
/**
 * Optional. Human-readable name for this workstation configuration.
 *
 * @return the display name, or {@code null} for none
 */
public java.lang.String getDisplayName() {
  return this.displayName;
}
/**
 * Sets the human-readable name for this workstation configuration.
 *
 * @param displayName the display name, or {@code null} for none
 */
public WorkstationConfig setDisplayName(java.lang.String displayName) {
  this.displayName = displayName;
  return this;
}
/**
 * Optional. Whether to enable Linux {@code auditd} logging on the workstation. When
 * enabled, a service_account must also be specified that has
 * {@code roles/logging.logWriter} and {@code roles/monitoring.metricWriter} on the
 * project. Operating system audit logging is distinct from
 * <a href="https://cloud.google.com/workstations/docs/audit-logging">Cloud Audit Logs</a>
 * and <a href="https://cloud.google.com/workstations/docs/container-output-logging#overview">
 * Container output logging</a>. Operating system audit logs are available in the
 * <a href="https://cloud.google.com/logging/docs">Cloud Logging</a> console by querying:
 * resource.type="gce_instance" log_name:"/logs/linux-auditd"
 *
 * @return the audit-agent flag, or {@code null} for none
 */
public java.lang.Boolean getEnableAuditAgent() {
  return this.enableAuditAgent;
}
/**
 * Sets whether Linux {@code auditd} logging is enabled; see
 * {@link #getEnableAuditAgent()} for prerequisites.
 *
 * @param enableAuditAgent the audit-agent flag, or {@code null} for none
 */
public WorkstationConfig setEnableAuditAgent(java.lang.Boolean enableAuditAgent) {
  this.enableAuditAgent = enableAuditAgent;
  return this;
}
/**
 * Immutable. Encrypts resources of this workstation configuration using a
 * customer-managed encryption key (CMEK). If specified, the boot disk of the Compute
 * Engine instance and the persistent disk are encrypted using this encryption key. If
 * this field is not set, the disks are encrypted using a generated key.
 * Customer-managed encryption keys do not protect disk metadata. If the
 * customer-managed encryption key is rotated, when the workstation instance is
 * stopped, the system attempts to recreate the persistent disk with the new version of
 * the key. Be sure to keep older versions of the key until the persistent disk is
 * recreated. Otherwise, data on the persistent disk might be lost. If the encryption
 * key is revoked, the workstation session automatically stops within 7 hours.
 * Immutable after the workstation configuration is created.
 *
 * @return the customer-managed encryption key, or {@code null} for none
 */
public CustomerEncryptionKey getEncryptionKey() {
  return this.encryptionKey;
}
/**
 * Sets the customer-managed encryption key; see {@link #getEncryptionKey()} for key
 * rotation and revocation caveats. Immutable after the configuration is created.
 *
 * @param encryptionKey the customer-managed encryption key, or {@code null} for none
 */
public WorkstationConfig setEncryptionKey(CustomerEncryptionKey encryptionKey) {
  this.encryptionKey = encryptionKey;
  return this;
}
/**
 * Optional. Ephemeral directories which won't persist across workstation sessions.
 *
 * @return the ephemeral directories, or {@code null} for none
 */
public java.util.List<EphemeralDirectory> getEphemeralDirectories() {
  return this.ephemeralDirectories;
}
/**
 * Sets the ephemeral (non-persistent) directories.
 *
 * @param ephemeralDirectories the ephemeral directories, or {@code null} for none
 */
public WorkstationConfig setEphemeralDirectories(java.util.List<EphemeralDirectory> ephemeralDirectories) {
  this.ephemeralDirectories = ephemeralDirectories;
  return this;
}
/**
 * Optional. Checksum computed by the server. May be sent on update and delete requests
 * to make sure that the client has an up-to-date value before proceeding.
 *
 * @return the server-computed checksum, or {@code null} for none
 */
public java.lang.String getEtag() {
  return this.etag;
}
/**
 * Sets the server-computed checksum used for optimistic concurrency.
 *
 * @param etag the checksum, or {@code null} for none
 */
public WorkstationConfig setEtag(java.lang.String etag) {
  this.etag = etag;
  return this;
}
/**
 * Optional. Grant creator of a workstation {@code roles/workstations.policyAdmin} role
 * along with {@code roles/workstations.user} role on the workstation created by them.
 * This allows workstation users to share access to either their entire workstation, or
 * individual ports. Defaults to false.
 *
 * @return the grant-admin-on-create flag, or {@code null} for none
 */
public java.lang.Boolean getGrantWorkstationAdminRoleOnCreate() {
  return this.grantWorkstationAdminRoleOnCreate;
}
/**
 * Sets whether workstation creators are granted admin roles on their workstations; see
 * {@link #getGrantWorkstationAdminRoleOnCreate()}.
 *
 * @param grantWorkstationAdminRoleOnCreate the flag, or {@code null} for none
 */
public WorkstationConfig setGrantWorkstationAdminRoleOnCreate(java.lang.Boolean grantWorkstationAdminRoleOnCreate) {
  this.grantWorkstationAdminRoleOnCreate = grantWorkstationAdminRoleOnCreate;
  return this;
}
/**
 * Optional. Runtime host for the workstation.
 *
 * @return the runtime host, or {@code null} for none
 */
public Host getHost() {
  return this.host;
}
/**
 * Sets the runtime host for the workstation.
 *
 * @param host the runtime host, or {@code null} for none
 */
public WorkstationConfig setHost(Host host) {
  this.host = host;
  return this;
}
/**
 * Optional. Number of seconds to wait before automatically stopping a workstation
 * after it last received user traffic. A value of {@code "0s"} indicates that Cloud
 * Workstations VMs created with this configuration should never time out due to
 * idleness. Provide a
 * <a href="https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#duration">duration</a>
 * terminated by {@code s} for seconds — for example, {@code "7200s"} (2 hours). The
 * default is {@code "1200s"} (20 minutes).
 *
 * @return the idle timeout duration string, or {@code null} for none
 */
public String getIdleTimeout() {
  return this.idleTimeout;
}
/**
 * Sets the idle timeout; see {@link #getIdleTimeout()} for the duration format.
 *
 * @param idleTimeout the idle timeout duration string, or {@code null} for none
 */
public WorkstationConfig setIdleTimeout(String idleTimeout) {
  this.idleTimeout = idleTimeout;
  return this;
}
/**
 * Optional. <a href="https://cloud.google.com/workstations/docs/label-resources">Labels</a>
 * that are applied to the workstation configuration and that are also propagated to
 * the underlying Compute Engine resources.
 *
 * @return the labels map, or {@code null} for none
 */
public java.util.Map<String, java.lang.String> getLabels() {
  return this.labels;
}
/**
 * Sets the labels applied to this configuration and its Compute Engine resources.
 *
 * @param labels the labels map, or {@code null} for none
 */
public WorkstationConfig setLabels(java.util.Map<String, java.lang.String> labels) {
  this.labels = labels;
  return this;
}
/**
 * Optional. Maximum number of workstations under this configuration a user can have
 * {@code workstations.workstation.use} permission on. Only enforced on
 * CreateWorkstation API calls on the user issuing the API request. Can be overridden
 * by: - granting a user
 * workstations.workstationConfigs.exemptMaxUsableWorkstationLimit permission, or -
 * having a user with that permission create a workstation and granting another user
 * {@code workstations.workstation.use} permission on that workstation. If not
 * specified, defaults to {@code 0}, which indicates unlimited.
 *
 * @return the maximum usable workstations, or {@code null} for none
 */
public java.lang.Integer getMaxUsableWorkstations() {
  return this.maxUsableWorkstations;
}
/**
 * Sets the per-user usable workstation limit; see {@link #getMaxUsableWorkstations()}.
 *
 * @param maxUsableWorkstations the limit ({@code 0} = unlimited), or {@code null} for none
 */
public WorkstationConfig setMaxUsableWorkstations(java.lang.Integer maxUsableWorkstations) {
  this.maxUsableWorkstations = maxUsableWorkstations;
  return this;
}
/**
 * Identifier. Full name of this workstation configuration.
 *
 * @return the full resource name, or {@code null} for none
 */
public java.lang.String getName() {
  return this.name;
}
/**
 * Sets the full resource name of this workstation configuration.
 *
 * @param name the full resource name, or {@code null} for none
 */
public WorkstationConfig setName(java.lang.String name) {
  this.name = name;
  return this;
}
/**
 * Optional. Directories to persist across workstation sessions.
 *
 * @return the persistent directories, or {@code null} for none
 */
public java.util.List<PersistentDirectory> getPersistentDirectories() {
  return this.persistentDirectories;
}
/**
 * Sets the directories that persist across workstation sessions.
 *
 * @param persistentDirectories the persistent directories, or {@code null} for none
 */
public WorkstationConfig setPersistentDirectories(java.util.List<PersistentDirectory> persistentDirectories) {
  this.persistentDirectories = persistentDirectories;
  return this;
}
/**
 * Optional. Readiness checks to perform when starting a workstation using this
 * workstation configuration. Mark a workstation as running only after all specified
 * readiness checks return 200 status codes.
 *
 * @return the readiness checks, or {@code null} for none
 */
public java.util.List<ReadinessCheck> getReadinessChecks() {
  return this.readinessChecks;
}
/**
 * Sets the readiness checks performed at workstation startup; see
 * {@link #getReadinessChecks()}.
 *
 * @param readinessChecks the readiness checks, or {@code null} for none
 */
public WorkstationConfig setReadinessChecks(java.util.List<ReadinessCheck> readinessChecks) {
  this.readinessChecks = readinessChecks;
  return this;
}
/**
 * Output only. Indicates whether this workstation configuration is currently being
 * updated to match its intended state.
 *
 * @return the reconciling flag, or {@code null} for none
 */
public java.lang.Boolean getReconciling() {
  return this.reconciling;
}
/**
 * Sets the reconciling flag (output-only on the service side).
 *
 * @param reconciling the reconciling flag, or {@code null} for none
 */
public WorkstationConfig setReconciling(java.lang.Boolean reconciling) {
  this.reconciling = reconciling;
  return this;
}
/**
 * Optional. Immutable. Specifies the zones used to replicate the VM and disk resources
 * within the region. If set, exactly two zones within the workstation cluster's region
 * must be specified — for example, {@code ['us-central1-a', 'us-central1-f']}. If this
 * field is empty, two default zones within the region are used. Immutable after the
 * workstation configuration is created.
 *
 * @return the replica zones, or {@code null} for none
 */
public java.util.List<java.lang.String> getReplicaZones() {
  return this.replicaZones;
}
/**
 * Sets the replica zones; see {@link #getReplicaZones()} for constraints. Immutable
 * after the configuration is created.
 *
 * @param replicaZones the replica zones, or {@code null} for none
 */
public WorkstationConfig setReplicaZones(java.util.List<java.lang.String> replicaZones) {
  this.replicaZones = replicaZones;
  return this;
}
/**
 * Optional. Number of seconds that a workstation can run until it is automatically
 * shut down. We recommend that workstations be shut down daily to reduce costs and so
 * that security updates can be applied upon restart. The idle_timeout and
 * running_timeout fields are independent of each other. Note that the running_timeout
 * field shuts down VMs after the specified time, regardless of whether or not the VMs
 * are idle. Provide duration terminated by {@code s} for seconds — for example,
 * {@code "54000s"} (15 hours). Defaults to {@code "43200s"} (12 hours). A value of
 * {@code "0s"} indicates that workstations using this configuration should never time
 * out. If encryption_key is set, it must be greater than {@code "0s"} and less than
 * {@code "86400s"} (24 hours). Warning: A value of {@code "0s"} indicates that Cloud
 * Workstations VMs created with this configuration have no maximum running time. This
 * is strongly discouraged because you incur costs and will not pick up security
 * updates.
 *
 * @return the running timeout duration string, or {@code null} for none
 */
public String getRunningTimeout() {
  return this.runningTimeout;
}
/**
 * Sets the maximum running time; see {@link #getRunningTimeout()} for the duration
 * format, defaults, and CMEK-related constraints.
 *
 * @param runningTimeout the running timeout duration string, or {@code null} for none
 */
public WorkstationConfig setRunningTimeout(String runningTimeout) {
  this.runningTimeout = runningTimeout;
  return this;
}
/**
 * Output only. A system-assigned unique identifier for this workstation configuration.
 *
 * @return the system-assigned unique identifier, or {@code null} for none
 */
public java.lang.String getUid() {
  return this.uid;
}
/**
 * Sets the system-assigned unique identifier (output-only on the service side).
 *
 * @param uid the unique identifier, or {@code null} for none
 */
public WorkstationConfig setUid(java.lang.String uid) {
  this.uid = uid;
  return this;
}
/**
 * Output only. Time when this workstation configuration was most recently updated.
 *
 * @return the last update time, or {@code null} for none
 */
public String getUpdateTime() {
  return this.updateTime;
}
/**
 * Sets the last update time (output-only on the service side).
 *
 * @param updateTime the last update time, or {@code null} for none
 */
public WorkstationConfig setUpdateTime(String updateTime) {
  this.updateTime = updateTime;
  return this;
}
/**
 * Stores an arbitrary field by name (generic JSON data support), keeping the fluent
 * {@code WorkstationConfig} return type.
 */
@Override
public WorkstationConfig set(String fieldName, Object value) {
  super.set(fieldName, value);
  return this;
}
/**
 * Returns a deep copy via the superclass clone, narrowed to this type.
 */
@Override
public WorkstationConfig clone() {
return (WorkstationConfig) super.clone();
}
}
/*
openjdk/jdk8 | 36,165 | corba/src/share/classes/com/sun/corba/se/impl/util/RepositoryId.java | /*
* Copyright (c) 1998, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* Licensed Materials - Property of IBM
* RMI-IIOP v1.0
* Copyright IBM Corp. 1998 1999 All Rights Reserved
*
*/
package com.sun.corba.se.impl.util;
import java.util.StringTokenizer;
import java.util.Hashtable;
import java.io.IOException;
import java.lang.reflect.Method;
// Imports for using codebase URL to load class
import java.net.MalformedURLException;
import org.omg.CORBA.portable.ValueBase;
import org.omg.CORBA.portable.IDLEntity;
//d11638 files in the same package, therefore remove their reference
//import com.sun.corba.se.impl.util.JDKBridge;
//import com.sun.corba.se.impl.util.IdentityHashtable;
import com.sun.corba.se.impl.io.ObjectStreamClass;
import javax.rmi.CORBA.Util;
public class RepositoryId {
// Legal IDL Identifier characters (1 = legal). Note
// that '.' (2E) is marked as legal even though it is
// not legal in IDL. This allows us to treat a fully
// qualified Java name with '.' package separators
// uniformly, and is safe because that is the only
// legal use of '.' in a Java name.
// NOTE(review): 256-entry lookup table, presumably indexed by the low 8 bits of a
// character value; the consuming code is outside this excerpt — confirm before
// relying on that. Do not reformat the rows: each group of 16 maps one hex column.
private static final byte[] IDL_IDENTIFIER_CHARS = {
// 0 1 2 3 4 5 6 7 8 9 a b c d e f
0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0, // 00-0f
0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0, // 10-1f
0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,1,0, // 20-2f
1,1,1,1, 1,1,1,1, 1,1,0,0, 0,0,0,0, // 30-3f
0,1,1,1, 1,1,1,1, 1,1,1,1, 1,1,1,1, // 40-4f
1,1,1,1, 1,1,1,1, 1,1,1,0, 0,0,0,1, // 50-5f
0,1,1,1, 1,1,1,1, 1,1,1,1, 1,1,1,1, // 60-6f
1,1,1,1, 1,1,1,1, 1,1,1,0, 0,0,0,0, // 70-7f
0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0, // 80-8f
0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0, // 90-9f
0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0, // a0-af
0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0, // b0-bf
1,1,1,1, 1,1,1,1, 1,1,1,1, 1,1,1,1, // c0-cf
0,1,1,1, 1,1,1,0, 1,1,1,1, 1,0,0,1, // d0-df
1,1,1,1, 1,1,1,1, 1,1,1,1, 1,1,1,1, // e0-ef
0,1,1,1, 1,1,1,0, 1,1,1,1, 1,0,0,1, // f0-ff
};
// NOTE(review): placeholder-looking value, and the visible part of this class does not
// implement Serializable — confirm whether this field is actually needed.
private static final long serialVersionUID = 123456789L;
// Codebase URL used when marshalling codebase information; resolved once at class
// initialization from JDKBridge.
private static String defaultServerURL = null;
private static boolean useCodebaseOnly = false;
static {
// The null check is always true at class-initialization time (the field is
// initialized to null just above); it only guards against hypothetical re-entry.
if (defaultServerURL == null)
defaultServerURL = (String)JDKBridge.getLocalCodebase();
useCodebaseOnly = JDKBridge.useCodebaseOnly();
}
// Caches mapping Class objects to their computed repository-id strings (value type,
// IDL type, and sequence forms respectively). Keyed by identity, which is correct for
// Class objects.
private static IdentityHashtable classToRepStr = new IdentityHashtable();
private static IdentityHashtable classIDLToRepStr = new IdentityHashtable();
private static IdentityHashtable classSeqToRepStr = new IdentityHashtable();
// NOTE(review): IdentityHashtable compares keys by reference; with String keys this
// only works if repository-id strings are canonicalized (e.g. via the cache below)
// before lookup — confirm against the callers, which are outside this excerpt.
private static final IdentityHashtable repStrToByteArray = new IdentityHashtable();
private static Hashtable repStrToClass = new Hashtable();
// Per-instance parsed state, populated by init(String).
private String repId = null;
private boolean isSupportedFormat = true;
private String typeString = null;
private String versionString = null;
private boolean isSequence = false;
private boolean isRMIValueType = false;
private boolean isIDLType = false;
private String completeClassName = null;
private String unqualifiedName = null;
private String definedInId = null;
private Class clazz = null;
// Serial version UID hash fragments of the repository id; kDefaultUID sentinel means
// "not yet computed/not applicable".
private String suid = null, actualSuid = null;
private long suidLong = ObjectStreamClass.kDefaultUID, actualSuidLong = ObjectStreamClass.kDefaultUID;
// Repository ID fragments
// Prefixes and separators used to assemble repository-id strings such as
// "RMI:java.lang.String:..." and "IDL:omg.org/CORBA/...:1.0".
private static final String kSequenceKeyword = "seq";
private static final String kValuePrefix = "RMI:";
private static final String kIDLPrefix = "IDL:";
private static final String kIDLNamePrefix = "omg.org/";
private static final String kIDLClassnamePrefix = "org.omg.";
private static final String kSequencePrefix = "[";
private static final String kCORBAPrefix = "CORBA/";
private static final String kArrayPrefix = kValuePrefix + kSequencePrefix + kCORBAPrefix;
private static final int kValuePrefixLength = kValuePrefix.length();
private static final int kIDLPrefixLength = kIDLPrefix.length();
private static final int kSequencePrefixLength = kSequencePrefix.length();
// Hash suffixes: all-zero hash marks interface-only types; 1 marks Externalizable.
private static final String kInterfaceHashCode = ":0000000000000000";
private static final String kInterfaceOnlyHashStr = "0000000000000000";
private static final String kExternalizableHashStr = "0000000000000001";
// Value tag utility methods and constants
// 0x7fffff00 is the base GIOP value tag; low-order bits encode type info and chunking
// (presumably per the CORBA valuetype marshalling rules — computeValueTag, defined
// later in this file, combines these).
public static final int kInitialValueTag= 0x7fffff00;
public static final int kNoTypeInfo = 0;
public static final int kSingleRepTypeInfo = 0x02;
public static final int kPartialListTypeInfo = 0x06;
public static final int kChunkedMask = 0x08;
// Precomputed value tags for every combination of codebase-presence, type-info kind,
// and chunked encoding, to avoid recomputation on the marshalling fast path.
public static final int kPreComputed_StandardRMIUnchunked = RepositoryId.computeValueTag(false, RepositoryId.kSingleRepTypeInfo, false);
public static final int kPreComputed_CodeBaseRMIUnchunked = RepositoryId.computeValueTag(true, RepositoryId.kSingleRepTypeInfo, false);
public static final int kPreComputed_StandardRMIChunked = RepositoryId.computeValueTag(false, RepositoryId.kSingleRepTypeInfo, true);
public static final int kPreComputed_CodeBaseRMIChunked = RepositoryId.computeValueTag(true, RepositoryId.kSingleRepTypeInfo, true);
public static final int kPreComputed_StandardRMIUnchunked_NoRep = RepositoryId.computeValueTag(false, RepositoryId.kNoTypeInfo, false);
public static final int kPreComputed_CodeBaseRMIUnchunked_NoRep = RepositoryId.computeValueTag(true, RepositoryId.kNoTypeInfo, false);
public static final int kPreComputed_StandardRMIChunked_NoRep = RepositoryId.computeValueTag(false, RepositoryId.kNoTypeInfo, true);
public static final int kPreComputed_CodeBaseRMIChunked_NoRep = RepositoryId.computeValueTag(true, RepositoryId.kNoTypeInfo, true);
// Public, well known repository IDs
// _REVISIT_ : A table structure with a good search routine for all of this
// would be more efficient and easier to maintain...
// String
// java.lang.String maps to the IDL valuetype omg.org/CORBA/WStringValue.
public static final String kWStringValueVersion = "1.0";
public static final String kWStringValueHash = ":"+kWStringValueVersion;
public static final String kWStringStubValue = "WStringValue";
public static final String kWStringTypeStr = "omg.org/CORBA/"+kWStringStubValue;
public static final String kWStringValueRepID = kIDLPrefix + kWStringTypeStr + kWStringValueHash;
// Any
public static final String kAnyRepID = kIDLPrefix + "omg.org/CORBA/Any";
// Class
// Anita4: convert to uppercase
// java.lang.Class maps to javax.rmi.CORBA.ClassDesc; its hash embeds both the actual
// and the declared serialVersionUID, upper-cased hex.
public static final String kClassDescValueHash = ":" +
Long.toHexString(
ObjectStreamClass.getActualSerialVersionUID(javax.rmi.CORBA.ClassDesc.class)).toUpperCase() + ":" +
Long.toHexString(
ObjectStreamClass.getSerialVersionUID(javax.rmi.CORBA.ClassDesc.class)).toUpperCase();
public static final String kClassDescStubValue = "ClassDesc";
public static final String kClassDescTypeStr = "javax.rmi.CORBA."+kClassDescStubValue;
public static final String kClassDescValueRepID = kValuePrefix + kClassDescTypeStr + kClassDescValueHash;
// Object
public static final String kObjectValueHash = ":1.0";
public static final String kObjectStubValue = "Object";
// Sequence
public static final String kSequenceValueHash = ":1.0";
public static final String kPrimitiveSequenceValueHash = ":0000000000000000";
// Serializable
public static final String kSerializableValueHash = ":1.0";
public static final String kSerializableStubValue = "Serializable";
// Externalizable
public static final String kExternalizableValueHash = ":1.0";
public static final String kExternalizableStubValue = "Externalizable";
// Remote (The empty string is used for java.rmi.Remote)
public static final String kRemoteValueHash = "";
public static final String kRemoteStubValue = "";
public static final String kRemoteTypeStr = "";
public static final String kRemoteValueRepID = "";
// Special-case lookup tables: a handful of Java types do not follow the generic
// Java-name <-> repository-id mapping rules and are translated via these tables.
// Maps IDL array element type names back to the Java class names they stand for.
private static final Hashtable kSpecialArrayTypeStrings = new Hashtable();
static {
kSpecialArrayTypeStrings.put("CORBA.WStringValue", new StringBuffer(java.lang.String.class.getName()));
kSpecialArrayTypeStrings.put("javax.rmi.CORBA.ClassDesc", new StringBuffer(java.lang.Class.class.getName()));
kSpecialArrayTypeStrings.put("CORBA.Object", new StringBuffer(java.rmi.Remote.class.getName()));
}
// Class -> full repository-id string for the special-cased classes.
private static final Hashtable kSpecialCasesRepIDs = new Hashtable();
static {
kSpecialCasesRepIDs.put(java.lang.String.class, kWStringValueRepID);
kSpecialCasesRepIDs.put(java.lang.Class.class, kClassDescValueRepID);
kSpecialCasesRepIDs.put(java.rmi.Remote.class, kRemoteValueRepID);
}
// Class -> stub value name (the unqualified type-name fragment of the id).
private static final Hashtable kSpecialCasesStubValues = new Hashtable();
static {
kSpecialCasesStubValues.put(java.lang.String.class, kWStringStubValue);
kSpecialCasesStubValues.put(java.lang.Class.class, kClassDescStubValue);
kSpecialCasesStubValues.put(java.lang.Object.class, kObjectStubValue);
kSpecialCasesStubValues.put(java.io.Serializable.class, kSerializableStubValue);
kSpecialCasesStubValues.put(java.io.Externalizable.class, kExternalizableStubValue);
kSpecialCasesStubValues.put(java.rmi.Remote.class, kRemoteStubValue);
}
// Class -> version-hash suffix of the repository id.
private static final Hashtable kSpecialCasesVersions = new Hashtable();
static {
kSpecialCasesVersions.put(java.lang.String.class, kWStringValueHash);
kSpecialCasesVersions.put(java.lang.Class.class, kClassDescValueHash);
kSpecialCasesVersions.put(java.lang.Object.class, kObjectValueHash);
kSpecialCasesVersions.put(java.io.Serializable.class, kSerializableValueHash);
kSpecialCasesVersions.put(java.io.Externalizable.class, kExternalizableValueHash);
kSpecialCasesVersions.put(java.rmi.Remote.class, kRemoteValueHash);
}
// Reverse mapping: type-string fragment -> Java class. Note kRemoteTypeStr is "",
// so the empty type string resolves to java.rmi.Remote.
private static final Hashtable kSpecialCasesClasses = new Hashtable();
static {
kSpecialCasesClasses.put(kWStringTypeStr, java.lang.String.class);
kSpecialCasesClasses.put(kClassDescTypeStr, java.lang.Class.class);
kSpecialCasesClasses.put(kRemoteTypeStr, java.rmi.Remote.class);
kSpecialCasesClasses.put("org.omg.CORBA.WStringValue", java.lang.String.class);
kSpecialCasesClasses.put("javax.rmi.CORBA.ClassDesc", java.lang.Class.class);
//kSpecialCasesClasses.put(kRemoteTypeStr, java.rmi.Remote.class);
}
// Class -> repository-id prefix used when the class is an array element type.
private static final Hashtable kSpecialCasesArrayPrefix = new Hashtable();
static {
kSpecialCasesArrayPrefix.put(java.lang.String.class, kValuePrefix + kSequencePrefix + kCORBAPrefix);
kSpecialCasesArrayPrefix.put(java.lang.Class.class, kValuePrefix + kSequencePrefix + "javax/rmi/CORBA/");
kSpecialCasesArrayPrefix.put(java.lang.Object.class, kValuePrefix + kSequencePrefix + "java/lang/");
kSpecialCasesArrayPrefix.put(java.io.Serializable.class, kValuePrefix + kSequencePrefix + "java/io/");
kSpecialCasesArrayPrefix.put(java.io.Externalizable.class, kValuePrefix + kSequencePrefix + "java/io/");
kSpecialCasesArrayPrefix.put(java.rmi.Remote.class, kValuePrefix + kSequencePrefix + kCORBAPrefix);
}
// Java primitive name -> IDL primitive name, where the two disagree.
private static final Hashtable kSpecialPrimitives = new Hashtable();
static {
kSpecialPrimitives.put("int","long");
kSpecialPrimitives.put("long","longlong");
kSpecialPrimitives.put("byte","octet");
}
/**
 * Used to convert ascii to hex.
 */
private static final byte ASCII_HEX[] = {
    (byte)'0',
    (byte)'1',
    (byte)'2',
    (byte)'3',
    (byte)'4',
    (byte)'5',
    (byte)'6',
    (byte)'7',
    (byte)'8',
    (byte)'9',
    (byte)'A',
    (byte)'B',
    (byte)'C',
    (byte)'D',
    (byte)'E',
    (byte)'F',
};

// bug fix for 4328952; to eliminate possibility of overriding this
// in a subclass.
public static final RepositoryIdCache cache = new RepositoryIdCache();

// Interface Rep ID Strings
public static final String kjava_rmi_Remote = createForAnyType(java.rmi.Remote.class);
public static final String korg_omg_CORBA_Object = createForAnyType(org.omg.CORBA.Object.class);

// Dummy arguments for getIdFromHelper method
public static final Class kNoParamTypes[] ={};
public static final Object kNoArgs[] = {};

// To create a RepositoryID, use code similar to the following:
// RepositoryId.cache.getId( id );

/** Creates an uninitialized instance; callers must invoke {@link #init} before use. */
RepositoryId(){}

/** Creates an instance and immediately parses {@code aRepId} into its fields. */
RepositoryId(String aRepId){
    init(aRepId);
}
/**
 * (Re)initializes this RepositoryId by parsing the given repository ID
 * string, which must have the form {@code <type>:<body>} — e.g. an
 * {@code RMI:<class>:<hashes>} value type ID or an
 * {@code IDL:<name>:<version>} ID. The empty string and the wstring value
 * repository ID are handled as special cases.
 *
 * @param aRepId the raw repository ID string
 * @return this instance, for chaining
 * @throws IllegalArgumentException if the string contains no ':' separator
 */
RepositoryId init(String aRepId)
{
    this.repId = aRepId;

    // Special case: the empty repository ID denotes java.rmi.Remote.
    if (aRepId.length() == 0) {
        clazz = java.rmi.Remote.class;
        typeString = "";
        isRMIValueType = true;
        suid = kInterfaceOnlyHashStr;
        return this;
    } else if (aRepId.equals(kWStringValueRepID)) {
        // Special case: the IDL wstring value maps to java.lang.String.
        clazz = java.lang.String.class;
        typeString = kWStringTypeStr;
        isIDLType = true;
        // fix where Attempting to obtain a FullValueDescription
        // for an RMI value type with a String field causes an exception.
        completeClassName = "java.lang.String";
        versionString = kWStringValueVersion;
        return this;
    } else {
        // Undo the \Uxxxx escaping applied when the ID was created.
        String repId = convertFromISOLatin1(aRepId);

        int firstIndex = repId.indexOf(':');
        if (firstIndex == -1)
            // Fixed typo: "RepsitoryId" -> "RepositoryId".
            throw new IllegalArgumentException("RepositoryId must have the form <type>:<body>");
        int secondIndex = repId.indexOf(':', firstIndex + 1);

        // versionString keeps its leading ':' when present.
        if (secondIndex == -1)
            versionString = "";
        else
            versionString = repId.substring(secondIndex);

        if (repId.startsWith(kIDLPrefix)) {
            // "IDL:" ID: the type string is everything between the prefix
            // and the next ':'.
            typeString =
                repId.substring(kIDLPrefixLength, repId.indexOf(':', kIDLPrefixLength));
            isIDLType = true;
            if (typeString.startsWith(kIDLNamePrefix))
                completeClassName = kIDLClassnamePrefix +
                    typeString.substring(kIDLNamePrefix.length()).replace('/','.');
            else
                completeClassName = typeString.replace('/','.');
        } else if (repId.startsWith(kValuePrefix)) {
            // "RMI:" ID: the version part carries the serialVersionUID
            // hash(es), possibly "actual:declared".
            typeString =
                repId.substring(kValuePrefixLength, repId.indexOf(':', kValuePrefixLength));
            isRMIValueType = true;

            if (versionString.indexOf('.') == -1) {
                actualSuid = versionString.substring(1);
                suid = actualSuid; // default if not explicitly specified
                if (actualSuid.indexOf(':') != -1){
                    // we have a declared hash also
                    int pos = actualSuid.indexOf(':')+1;
                    suid = actualSuid.substring(pos);
                    actualSuid = actualSuid.substring(0, pos-1);
                }
            } else {
                // _REVISIT_ : Special case version failure ?
            }
        } else {
            // Unknown prefix: remember the raw ID but mark it unsupported.
            isSupportedFormat = false;
            typeString = "";
        }

        if (typeString.startsWith(kSequencePrefix)) {
            isSequence = true;
        }
        return this;
    }
}
/**
 * Lazily computes and caches the unqualified (package-less) name of the
 * underlying class, also caching the enclosing package's IDL "defined-in"
 * ID as a side effect.
 *
 * NOTE(review): for a qualified name, substring(index) retains the leading
 * '.' in the result — confirm callers expect that (substring(index + 1)
 * would drop it).
 */
public final String getUnqualifiedName() {
    if (unqualifiedName == null){
        String className = getClassName();
        int index = className.lastIndexOf('.');
        if (index == -1){
            unqualifiedName = className;
            definedInId = "IDL::1.0";   // default (unnamed) package
        }
        else {
            unqualifiedName = className.substring(index);
            definedInId = "IDL:" + className.substring(0, index).replace('.','/') + ":1.0";
        }
    }
    return unqualifiedName;
}
/**
 * Returns the IDL "defined-in" ID of the enclosing scope, computing it via
 * {@link #getUnqualifiedName()} on first use.
 */
public final String getDefinedInId() {
    if (definedInId == null){
        getUnqualifiedName();   // populates definedInId as a side effect
    }
    return definedInId;
}

/** Returns the type portion of this repository ID (between the two ':'). */
public final String getTypeString() {
    return typeString;
}

/** Returns the version portion of this ID (including its leading ':', or ""). */
public final String getVersionString() {
    return versionString;
}

/** Returns the declared serialVersionUID hash as a hex string. */
public final String getSerialVersionUID() {
    return suid;
}

/** Returns the actual (computed) serialVersionUID hash as a hex string. */
public final String getActualSerialVersionUID() {
    return actualSuid;
}

/** Returns the declared serialVersionUID as a long. */
public final long getSerialVersionUIDAsLong() {
    return suidLong;
}

/** Returns the actual (computed) serialVersionUID as a long. */
public final long getActualSerialVersionUIDAsLong() {
    return actualSuidLong;
}

/** True if this ID uses the RMI value-type ("RMI:") format. */
public final boolean isRMIValueType() {
    return isRMIValueType;
}

/** True if this ID uses the IDL ("IDL:") format. */
public final boolean isIDLType() {
    return isIDLType;
}

/** Returns the raw repository ID string this instance was built from. */
public final String getRepositoryId() {
    return repId;
}

/** Returns the cached marshalled bytes for the given ID string, or null if absent. */
public static byte[] getByteArray(String repStr) {
    synchronized (repStrToByteArray){
        return (byte[]) repStrToByteArray.get(repStr);
    }
}

/** Caches the marshalled bytes for the given repository ID string. */
public static void setByteArray(String repStr, byte[] repStrBytes) {
    synchronized (repStrToByteArray){
        repStrToByteArray.put(repStr, repStrBytes);
    }
}

/** True if the type string denotes an IDL sequence (i.e. a Java array). */
public final boolean isSequence() {
    return isSequence;
}

/** False if the ID had neither the "IDL:" nor the "RMI:" prefix. */
public final boolean isSupportedFormat() {
    return isSupportedFormat;
}
// This method will return the classname from the typestring OR if the classname turns out to be
// a special class "pseudo" name, then the matching real classname is returned.
/**
 * Returns the Java class name encoded in this ID: the type string for RMI
 * value types, the mapped class name for IDL types, or null for
 * unsupported formats.
 */
public final String getClassName() {
    if (isRMIValueType)
        return typeString;
    else if (isIDLType)
        return completeClassName;
    else return null;
}

// This method calls getClazzFromType() and falls back to the repStrToClass
// cache if no class was found. It's used where any class matching the
// given repid is an acceptable result.
/**
 * Resolves this ID to a class, falling back to the repStrToClass cache if
 * the normal lookup fails.
 *
 * @throws ClassNotFoundException if neither the lookup nor the cache yields a class
 */
public final Class getAnyClassFromType() throws ClassNotFoundException {
    try {
        return getClassFromType();
    } catch (ClassNotFoundException cnfe) {
        Class clz = (Class)repStrToClass.get(repId);
        if (clz != null)
            return clz;
        else
            throw cnfe;
    }
}
/**
 * Resolves this ID to a class: the cached class if already known, a
 * special-case mapping if one exists, otherwise a normal class load —
 * retried against {@code defaultServerURL} if the local load fails.
 *
 * @throws ClassNotFoundException if the class cannot be located anywhere
 */
public final Class getClassFromType()
    throws ClassNotFoundException {
    if (clazz != null)
        return clazz;

    Class specialCase = (Class)kSpecialCasesClasses.get(getClassName());
    if (specialCase != null){
        clazz = specialCase;
        return specialCase;
    }
    else
    {
        try{
            return Util.loadClass(getClassName(), null, null);
        }
        catch(ClassNotFoundException cnfe){
            // Fall back to the configured codebase URL, if any; a malformed
            // URL is reported as the original ClassNotFoundException.
            if (defaultServerURL != null) {
                try{
                    return getClassFromType(defaultServerURL);
                }
                catch(MalformedURLException mue){
                    throw cnfe;
                }
            }
            else throw cnfe;
        }
    }
}
/**
 * Resolves this ID to a class of the expected type, using the expected
 * type's class loader and the supplied codebase.
 *
 * @param expectedType the type the result must conform to (may be null)
 * @param codebase     remote codebase to search, or null
 * @throws ClassNotFoundException if no suitable class can be loaded
 */
public final Class getClassFromType(Class expectedType, String codebase)
    throws ClassNotFoundException {
    if (clazz != null)
        return clazz;

    Class specialCase = (Class)kSpecialCasesClasses.get(getClassName());
    if (specialCase != null){
        clazz = specialCase;
        return specialCase;
    } else {
        ClassLoader expectedTypeClassLoader = (expectedType == null ? null : expectedType.getClassLoader());
        return Utility.loadClassOfType(getClassName(),
                                       codebase,
                                       expectedTypeClassLoader,
                                       expectedType,
                                       expectedTypeClassLoader);
    }
}

/**
 * Resolves this ID to a class, searching the given codebase URL.
 *
 * NOTE(review): unlike the other overloads this never consults the cached
 * {@code clazz} or the special-case table — confirm that is intentional.
 *
 * @throws ClassNotFoundException if the class cannot be loaded
 * @throws MalformedURLException  if {@code url} is not a valid URL
 */
public final Class getClassFromType(String url)
    throws ClassNotFoundException, MalformedURLException {
    return Util.loadClass(getClassName(), url, null);
}

/** Returns the raw repository ID string. */
public final String toString() {
    return repId;
}
/**
 * Checks to see if the FullValueDescription should be retrieved.
 *
 * @return false if the class's own repository ID equals {@code repositoryID}
 *         (no description needed); true if both IDs are RMI value types
 *         with matching serial version UIDs
 * @throws IOException if the suids do not match or if either repository ID
 *         is not an RMI value type
 */
public static boolean useFullValueDescription(Class clazz, String repositoryID)
    throws IOException{

    String clazzRepIDStr = createForAnyType(clazz);
    if (clazzRepIDStr.equals(repositoryID))
        return false;

    RepositoryId targetRepid;
    RepositoryId clazzRepid;

    synchronized(cache) {
        // to avoid race condition where multiple threads could be
        // accessing this method, and their access to the cache may
        // be interleaved giving unexpected results
        targetRepid = cache.getId(repositoryID);
        clazzRepid = cache.getId(clazzRepIDStr);
    }
    //ObjectStreamClass osc = ObjectStreamClass.lookup(clazz);

    if ((targetRepid.isRMIValueType()) && (clazzRepid.isRMIValueType())){
        if (!targetRepid.getSerialVersionUID().equals(clazzRepid.getSerialVersionUID())) {
            String mssg = "Mismatched serialization UIDs : Source (Rep. ID" +
                clazzRepid + ") = " +
                clazzRepid.getSerialVersionUID() + " whereas Target (Rep. ID " + repositoryID +
                ") = " + targetRepid.getSerialVersionUID();
            //com.sun.corba.se.impl.io.ValueUtility.log("RepositoryId",mssg);
            throw new IOException(mssg);
        }
        else {
            return true;
        }
    }
    else {
        throw new IOException("The repository ID is not of an RMI value type (Expected ID = " + clazzRepIDStr + "; Received ID = " + repositoryID +")");
    }
}
/** Convenience overload: hash string for a serializable object's class. */
private static String createHashString(java.io.Serializable ser) {
    return createHashString(ser.getClass());
}

/**
 * Builds the ":<actual-hash>:<declared-hash>" suffix of an RMI value-type
 * repository ID. Interfaces and non-serializable classes get the fixed
 * interface hash; hash values 0 and 1 map to the reserved interface-only
 * and externalizable markers; anything else is the UID in upper-case hex,
 * zero-padded to 16 digits.
 */
private static String createHashString(java.lang.Class clazz) {

    if (clazz.isInterface() || !java.io.Serializable.class.isAssignableFrom(clazz))
        return kInterfaceHashCode;

    //ObjectStreamClass osc = ObjectStreamClass.lookup(clazz);

    long actualLong = ObjectStreamClass.getActualSerialVersionUID(clazz);
    String hash = null;
    if (actualLong == 0)
        hash = kInterfaceOnlyHashStr;
    else if (actualLong == 1)
        hash = kExternalizableHashStr;
    else
        hash = Long.toHexString(actualLong).toUpperCase();
    // Zero-pad to a fixed width of 16 hex digits.
    while(hash.length() < 16){
        hash = "0" + hash;
    }

    long declaredLong = ObjectStreamClass.getSerialVersionUID(clazz);
    String declared = null;
    if (declaredLong == 0)
        declared = kInterfaceOnlyHashStr;
    else if (declaredLong == 1)
        declared = kExternalizableHashStr;
    else
        declared = Long.toHexString(declaredLong).toUpperCase();
    while (declared.length() < 16){
        declared = "0" + declared;
    }
    hash = hash + ":" + declared;

    return ":" + hash;
}
/**
 * Creates a repository ID for a sequence. This is for expert users only as
 * this method assumes the object passed is an array. If passed an object
 * that is not an array, it will produce a rep id for a sequence of zero
 * length. This would be an error.
 * @param ser The Java object to create a repository ID for
 **/
public static String createSequenceRepID(java.lang.Object ser){
    return createSequenceRepID(ser.getClass());
}

/**
 * Creates a repository ID for a sequence. This is for expert users only as
 * this method assumes the object passed is an array. If passed an object
 * that is not an array, it will produce a malformed rep id.
 * @param clazz The Java class to create a repository ID for
 **/
public static String createSequenceRepID(java.lang.Class clazz){
    synchronized (classSeqToRepStr){
        // Serve from the per-class cache when possible.
        String repid = (String)classSeqToRepStr.get(clazz);
        if (repid != null)
            return repid;

        Class originalClazz = clazz;

        // Strip array dimensions down to the element type, counting them.
        Class type = null;
        int numOfDims = 0;
        while ((type = clazz.getComponentType()) != null) {
            numOfDims++;
            clazz = type;
        }

        if (clazz.isPrimitive())
            repid = kValuePrefix + originalClazz.getName() + kPrimitiveSequenceValueHash;
        else {
            // Object element type: JVM-style "[...L<name>;" plus hash suffix.
            StringBuffer buf = new StringBuffer();
            buf.append(kValuePrefix);
            while(numOfDims-- > 0) {
                buf.append("[");
            }
            buf.append("L");
            buf.append(convertToISOLatin1(clazz.getName()));
            buf.append(";");
            buf.append(createHashString(clazz));
            repid = buf.toString();
        }
        classSeqToRepStr.put(originalClazz,repid);
        return repid;
    }
}
/**
 * Returns the fixed repository ID for a special-case class (or a sequence
 * ID when the class is an array); null if the class is not special-cased.
 */
public static String createForSpecialCase(java.lang.Class clazz){
    if (clazz.isArray()){
        return createSequenceRepID(clazz);
    }
    else {
        return (String)kSpecialCasesRepIDs.get(clazz);
    }
}

/** Object overload of {@link #createForSpecialCase(java.lang.Class)}. */
public static String createForSpecialCase(java.io.Serializable ser){
    Class clazz = ser.getClass();
    if (clazz.isArray()){
        return createSequenceRepID(ser);
    }
    else
        return createForSpecialCase(clazz);
}
/**
 * Creates a repository ID for a normal Java Type.
 * @param ser The Java object to create a repository ID for
 * @exception com.sun.corba.se.impl.io.TypeMismatchException if ser implements the
 * org.omg.CORBA.portable.IDLEntity interface which indicates it is an IDL Value type.
 **/
public static String createForJavaType(java.io.Serializable ser)
    throws com.sun.corba.se.impl.io.TypeMismatchException
{
    synchronized (classToRepStr) {
        // Special cases (String, Class, arrays, ...) take precedence.
        String repid = createForSpecialCase(ser);
        if (repid != null)
            return repid;
        Class clazz = ser.getClass();

        // Per-class cache of previously computed IDs.
        repid = (String)classToRepStr.get(clazz);
        if (repid != null)
            return repid;

        repid = kValuePrefix + convertToISOLatin1(clazz.getName()) +
            createHashString(clazz);

        classToRepStr.put(clazz, repid);
        // Also record the reverse mapping used by getAnyClassFromType().
        repStrToClass.put(repid, clazz);
        return repid;
    }
}

/**
 * Creates a repository ID for a normal Java Type.
 * @param clz The Java class to create a repository ID for
 * @exception com.sun.corba.se.impl.io.TypeMismatchException if clz implements the
 * org.omg.CORBA.portable.IDLEntity interface which indicates it is an IDL Value type.
 **/
public static String createForJavaType(Class clz)
    throws com.sun.corba.se.impl.io.TypeMismatchException
{
    synchronized (classToRepStr){
        String repid = createForSpecialCase(clz);
        if (repid != null)
            return repid;

        repid = (String)classToRepStr.get(clz);
        if (repid != null)
            return repid;

        repid = kValuePrefix + convertToISOLatin1(clz.getName()) +
            createHashString(clz);

        classToRepStr.put(clz, repid);
        repStrToClass.put(repid, clz);
        return repid;
    }
}
/**
 * Creates a repository ID for an IDL Java Type.
 * @param ser The IDL Value object to create a repository ID for
 * @param major The major version number
 * @param minor The minor version number
 * @exception com.sun.corba.se.impl.io.TypeMismatchException if ser does not implement the
 * org.omg.CORBA.portable.IDLEntity interface which indicates it is an IDL Value type.
 **/
public static String createForIDLType(Class ser, int major, int minor)
    throws com.sun.corba.se.impl.io.TypeMismatchException
{
    synchronized (classIDLToRepStr){
        // Per-class cache; IDs have the form
        // "IDL:<name-with-slashes>:<major>.<minor>".
        String repid = (String)classIDLToRepStr.get(ser);
        if (repid != null)
            return repid;

        repid = kIDLPrefix + convertToISOLatin1(ser.getName()).replace('.','/') +
            ":" + major + "." + minor;

        classIDLToRepStr.put(ser, repid);
        return repid;
    }
}
/**
 * Obtains a repository ID by invoking the static {@code id()} method of the
 * IDL-generated {@code <Class>Helper} class.
 *
 * NOTE(review): each catch wraps only {@code toString()} of the cause into
 * the MARSHAL, discarding the original stack trace — consider initCause if
 * this is ever revisited.
 *
 * @throws org.omg.CORBA.MARSHAL if the helper class or its id() method
 *         cannot be found or invoked
 */
private static String getIdFromHelper(Class clazz){
    try {
        Class helperClazz = Utility.loadClassForClass(clazz.getName()+"Helper", null,
            clazz.getClassLoader(), clazz, clazz.getClassLoader());
        Method idMethod = helperClazz.getDeclaredMethod("id", kNoParamTypes);
        return (String)idMethod.invoke(null, kNoArgs);
    }
    catch(java.lang.ClassNotFoundException cnfe)
    {
        throw new org.omg.CORBA.MARSHAL(cnfe.toString());
    }
    catch(java.lang.NoSuchMethodException nsme)
    {
        throw new org.omg.CORBA.MARSHAL(nsme.toString());
    }
    catch(java.lang.reflect.InvocationTargetException ite)
    {
        throw new org.omg.CORBA.MARSHAL(ite.toString());
    }
    catch(java.lang.IllegalAccessException iae)
    {
        throw new org.omg.CORBA.MARSHAL(iae.toString());
    }
}
/**
 * Creates a repository ID for the type if it is either a java type
 * or an IDL type.
 * @param type The type to create rep. id for
 * @return The rep. id, or null on a type mismatch.
 **/
public static String createForAnyType(Class type) {
    try{
        if (type.isArray())
            return createSequenceRepID(type);
        else if (IDLEntity.class.isAssignableFrom(type))
        {
            // Prefer the generated Helper's id(); fall back to a
            // computed IDL ID (version 1.0) on any failure.
            try{
                return getIdFromHelper(type);
            }
            catch(Throwable t) {
                return createForIDLType(type, 1, 0);
            }
        }
        else return createForJavaType(type);
    }
    catch(com.sun.corba.se.impl.io.TypeMismatchException e){
        return null;
    }
}
/**
 * True for "abstract base" interfaces: IDL entities that are neither value
 * bases nor CORBA object references.
 */
public static boolean isAbstractBase(Class clazz) {
    return (clazz.isInterface() &&
            IDLEntity.class.isAssignableFrom(clazz) &&
            (!ValueBase.class.isAssignableFrom(clazz)) &&
            (!org.omg.CORBA.Object.class.isAssignableFrom(clazz)));
}

/**
 * True for the three root types — Object, Serializable, Externalizable —
 * that this mapping treats specially.
 */
public static boolean isAnyRequired(Class clazz) {
    return ((clazz == java.lang.Object.class) ||
            (clazz == java.io.Serializable.class) ||
            (clazz == java.io.Externalizable.class));
}
/**
 * Parses a hexadecimal string, with or without a leading "0x" prefix,
 * into a long.
 *
 * @param hexNumber hex digits, optionally prefixed with "0x"
 * @return the parsed value
 * @throws NumberFormatException if the string is not valid hex
 */
public static long fromHex(String hexNumber) {
    // Long.parseLong avoids the needless boxing of
    // Long.valueOf(...).longValue(); behavior is otherwise identical.
    String digits = hexNumber.startsWith("0x") ? hexNumber.substring(2) : hexNumber;
    return Long.parseLong(digits, 16);
}
/**
 * Convert strings with illegal IDL identifier characters.
 * <p>
 * Section 5.5.7 of OBV spec.
 */
public static String convertToISOLatin1 (String name) {

    int length = name.length();
    if (length == 0) {
        return name;
    }

    // The buffer is created lazily, only once the first character that
    // needs escaping is found; until then the input is returned untouched.
    StringBuffer buffer = null;

    for (int i = 0; i < length; i++) {

        char c = name.charAt(i);

        if (c > 255 || IDL_IDENTIFIER_CHARS[c] == 0) {

            // We gotta convert. Have we already started?
            if (buffer == null) {
                // No, so get set up...
                buffer = new StringBuffer(name.substring(0,i));
            }

            // Convert the character into the IDL escape syntax...
            // (\Uxxxx, one hex digit per nibble via the ASCII_HEX table)
            buffer.append(
                "\\U" +
                (char)ASCII_HEX[(c & 0xF000) >>> 12] +
                (char)ASCII_HEX[(c & 0x0F00) >>> 8] +
                (char)ASCII_HEX[(c & 0x00F0) >>> 4] +
                (char)ASCII_HEX[(c & 0x000F)]);

        } else {
            if (buffer != null) {
                buffer.append(c);
            }
        }
    }

    if (buffer != null) {
        name = buffer.toString();
    }

    return name;
}
/**
 * Convert strings with ISO Latin 1 escape sequences back to original strings.
 * <p>
 * Section 5.5.7 of OBV spec.
 *
 * NOTE(review): only the low byte of each \Uxxxx escape (buffer[1]) is
 * inserted back, so escapes above U+00FF would be truncated — presumably
 * acceptable because the escaped input is ISO-Latin-1; confirm. Also, the
 * repeated buf.toString() scans make this O(n^2), fine for short IDs.
 */
private static String convertFromISOLatin1 (String name) {

    int index = -1;
    StringBuffer buf = new StringBuffer(name);

    while ((index = buf.toString().indexOf("\\U")) != -1){
        // Four hex digits follow "\U"; pad so the byte loop below lines up.
        String str = "0000" + buf.toString().substring(index+2, index+6);

        // Convert Hexadecimal
        byte[] buffer = new byte[(str.length() - 4) / 2];
        for (int i=4, j=0; i < str.length(); i +=2, j++) {
            buffer[j] = (byte)((Utility.hexOf(str.charAt(i)) << 4) & 0xF0);
            buffer[j] |= (byte)((Utility.hexOf(str.charAt(i+1)) << 0) & 0x0F);
        }

        // Remove the escape and splice in the decoded character.
        buf = new StringBuffer(delete(buf.toString(), index, index+6));
        buf.insert(index, (char)buffer[1]);
    }

    return buf.toString();
}
/**
 * Returns {@code str} with the half-open character range [from, to)
 * removed.
 */
private static String delete(String str, int from, int to)
{
    StringBuilder sb = new StringBuilder(str.length() - (to - from));
    sb.append(str, 0, from).append(str, to, str.length());
    return sb.toString();
}
/**
 * Repeatedly substitutes {@code source} for the first occurrence of
 * {@code arg} in {@code target} until no occurrence remains (a fixpoint).
 * Because the scan restarts from the beginning after every substitution,
 * the loop will not terminate if {@code source} itself contains
 * {@code arg}.
 */
private static String replace(String target, String arg, String source)
{
    for (int hit = target.indexOf(arg); hit != -1; hit = target.indexOf(arg)) {
        target = target.substring(0, hit) + source + target.substring(hit + arg.length());
    }
    return target;
}
/**
 * Assembles a GIOP value tag from its components: the base tag, the
 * type-information bits, plus the codebase-present and chunked-encoding
 * flags when requested.
 */
public static int computeValueTag(boolean codeBasePresent, int typeInfo, boolean chunkedEncoding){
    int tag = kInitialValueTag | typeInfo;
    if (codeBasePresent) {
        tag |= 0x00000001;
    }
    if (chunkedEncoding) {
        tag |= kChunkedMask;
    }
    return tag;
}
/** True if the codebase-present bit (bit 0) of the value tag is set. */
public static boolean isCodeBasePresent(int value_tag){
    return ((value_tag & 0x00000001) == 1);
}

/** Extracts the two type-information bits (mask 0x6) from the value tag. */
public static int getTypeInfo(int value_tag){
    return (value_tag & 0x00000006);
}

/** True if the chunked-encoding bit of the value tag is set. */
public static boolean isChunkedEncoding(int value_tag){
    return ((value_tag & kChunkedMask) != 0);
}

/** Returns the configured default codebase URL, or null if none. */
public static String getServerURL(){
    return defaultServerURL;
}
}
|
apache/flink-cdc | 36,385 | flink-cdc-runtime/src/main/java/org/apache/flink/cdc/runtime/typeutils/DataTypeConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.cdc.runtime.typeutils;
import org.apache.flink.cdc.common.data.ArrayData;
import org.apache.flink.cdc.common.data.DateData;
import org.apache.flink.cdc.common.data.DecimalData;
import org.apache.flink.cdc.common.data.GenericArrayData;
import org.apache.flink.cdc.common.data.GenericMapData;
import org.apache.flink.cdc.common.data.LocalZonedTimestampData;
import org.apache.flink.cdc.common.data.MapData;
import org.apache.flink.cdc.common.data.TimeData;
import org.apache.flink.cdc.common.data.TimestampData;
import org.apache.flink.cdc.common.data.ZonedTimestampData;
import org.apache.flink.cdc.common.data.binary.BinaryStringData;
import org.apache.flink.cdc.common.schema.Column;
import org.apache.flink.cdc.common.types.ArrayType;
import org.apache.flink.cdc.common.types.BigIntType;
import org.apache.flink.cdc.common.types.BinaryType;
import org.apache.flink.cdc.common.types.BooleanType;
import org.apache.flink.cdc.common.types.CharType;
import org.apache.flink.cdc.common.types.DataType;
import org.apache.flink.cdc.common.types.DataTypes;
import org.apache.flink.cdc.common.types.DateType;
import org.apache.flink.cdc.common.types.DecimalType;
import org.apache.flink.cdc.common.types.DoubleType;
import org.apache.flink.cdc.common.types.FloatType;
import org.apache.flink.cdc.common.types.IntType;
import org.apache.flink.cdc.common.types.LocalZonedTimestampType;
import org.apache.flink.cdc.common.types.MapType;
import org.apache.flink.cdc.common.types.RowType;
import org.apache.flink.cdc.common.types.SmallIntType;
import org.apache.flink.cdc.common.types.TimeType;
import org.apache.flink.cdc.common.types.TimestampType;
import org.apache.flink.cdc.common.types.TinyIntType;
import org.apache.flink.cdc.common.types.VarBinaryType;
import org.apache.flink.cdc.common.types.VarCharType;
import org.apache.flink.cdc.common.types.ZonedTimestampType;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.sql.type.SqlTypeName;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/** Utilities for converting between CDC {@link DataType}s, Calcite {@link RelDataType}s, and runtime values. */
public class DataTypeConverter {
// Temporal conversion factors used by the convertTo* helpers.
static final long MILLISECONDS_PER_SECOND = TimeUnit.SECONDS.toMillis(1);
static final long NANOSECONDS_PER_MILLISECOND = TimeUnit.MILLISECONDS.toNanos(1);
static final long NANOSECONDS_PER_DAY = TimeUnit.DAYS.toNanos(1);
/**
 * Builds a {@link RowType} whose fields mirror the given columns — same
 * names and types, in the same order.
 */
public static RowType toRowType(List<Column> columnList) {
    int size = columnList.size();
    DataType[] dataTypes = new DataType[size];
    String[] columnNames = new String[size];
    for (int i = 0; i < size; i++) {
        Column column = columnList.get(i);
        dataTypes[i] = column.getType();
        columnNames[i] = column.getName();
    }
    return RowType.of(dataTypes, columnNames);
}
/**
 * Maps a CDC {@link DataType} to the Java class used to represent its
 * values: wrapper classes for numeric/boolean roots, CDC data classes for
 * temporal, decimal and collection roots, {@code byte[]} for binary roots.
 *
 * @throws UnsupportedOperationException for type roots with no mapping
 */
public static Class<?> convertOriginalClass(DataType dataType) {
    switch (dataType.getTypeRoot()) {
        case BOOLEAN:
            return Boolean.class;
        case TINYINT:
            return Byte.class;
        case SMALLINT:
            return Short.class;
        case INTEGER:
            return Integer.class;
        case BIGINT:
            return Long.class;
        case DATE:
            return DateData.class;
        case TIME_WITHOUT_TIME_ZONE:
            return TimeData.class;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            return TimestampData.class;
        case TIMESTAMP_WITH_TIME_ZONE:
            return ZonedTimestampData.class;
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            return LocalZonedTimestampData.class;
        case FLOAT:
            return Float.class;
        case DOUBLE:
            return Double.class;
        case CHAR:
        case VARCHAR:
            return String.class;
        case BINARY:
        case VARBINARY:
            return byte[].class;
        case DECIMAL:
            return DecimalData.class;
        case ROW:
            // Rows have no single representation class here.
            return Object.class;
        case ARRAY:
            return ArrayData.class;
        case MAP:
            return MapData.class;
        default:
            throw new UnsupportedOperationException("Unsupported type: " + dataType);
    }
}
/**
 * Converts a list of CDC columns into a Calcite struct {@link RelDataType},
 * preserving each column's nullability and, where applicable, its
 * length/precision/scale.
 *
 * NOTE(review): the ROW, ARRAY and MAP branches pass nullable(true)
 * unconditionally, ignoring the column's declared nullability — confirm
 * whether that is intentional. Zoned timestamps are mapped to plain
 * SqlTypeName.TIMESTAMP here, while {@link #convertCalciteType} rejects
 * them — confirm the asymmetry.
 *
 * @throws UnsupportedOperationException for unmapped type roots
 */
public static RelDataType convertCalciteRelDataType(
        RelDataTypeFactory typeFactory, List<Column> columns) {
    RelDataTypeFactory.Builder fieldInfoBuilder = typeFactory.builder();
    for (Column column : columns) {
        switch (column.getType().getTypeRoot()) {
            case BOOLEAN:
                BooleanType booleanType = (BooleanType) column.getType();
                fieldInfoBuilder
                        .add(column.getName(), SqlTypeName.BOOLEAN)
                        .nullable(booleanType.isNullable());
                break;
            case TINYINT:
                TinyIntType tinyIntType = (TinyIntType) column.getType();
                fieldInfoBuilder
                        .add(column.getName(), SqlTypeName.TINYINT)
                        .nullable(tinyIntType.isNullable());
                break;
            case SMALLINT:
                SmallIntType smallIntType = (SmallIntType) column.getType();
                fieldInfoBuilder
                        .add(column.getName(), SqlTypeName.SMALLINT)
                        .nullable(smallIntType.isNullable());
                break;
            case INTEGER:
                IntType intType = (IntType) column.getType();
                fieldInfoBuilder
                        .add(column.getName(), SqlTypeName.INTEGER)
                        .nullable(intType.isNullable());
                break;
            case BIGINT:
                BigIntType bigIntType = (BigIntType) column.getType();
                fieldInfoBuilder
                        .add(column.getName(), SqlTypeName.BIGINT)
                        .nullable(bigIntType.isNullable());
                break;
            case DATE:
                DateType dataType = (DateType) column.getType();
                fieldInfoBuilder
                        .add(column.getName(), SqlTypeName.DATE)
                        .nullable(dataType.isNullable());
                break;
            case TIME_WITHOUT_TIME_ZONE:
                TimeType timeType = (TimeType) column.getType();
                fieldInfoBuilder
                        .add(column.getName(), SqlTypeName.TIME, timeType.getPrecision())
                        .nullable(timeType.isNullable());
                break;
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                TimestampType timestampType = (TimestampType) column.getType();
                fieldInfoBuilder
                        .add(
                                column.getName(),
                                SqlTypeName.TIMESTAMP,
                                timestampType.getPrecision())
                        .nullable(timestampType.isNullable());
                break;
            case TIMESTAMP_WITH_TIME_ZONE:
                // NOTE(review): zone information is not representable in
                // SqlTypeName.TIMESTAMP; see class note above.
                ZonedTimestampType zonedTimestampType = (ZonedTimestampType) column.getType();
                fieldInfoBuilder
                        .add(
                                column.getName(),
                                SqlTypeName.TIMESTAMP,
                                zonedTimestampType.getPrecision())
                        .nullable(zonedTimestampType.isNullable());
                break;
            case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
                LocalZonedTimestampType localZonedTimestampType =
                        (LocalZonedTimestampType) column.getType();
                fieldInfoBuilder
                        .add(
                                column.getName(),
                                SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
                                localZonedTimestampType.getPrecision())
                        .nullable(localZonedTimestampType.isNullable());
                break;
            case FLOAT:
                FloatType floatType = (FloatType) column.getType();
                fieldInfoBuilder
                        .add(column.getName(), SqlTypeName.FLOAT)
                        .nullable(floatType.isNullable());
                break;
            case DOUBLE:
                DoubleType doubleType = (DoubleType) column.getType();
                fieldInfoBuilder
                        .add(column.getName(), SqlTypeName.DOUBLE)
                        .nullable(doubleType.isNullable());
                break;
            case CHAR:
                CharType charType = (CharType) column.getType();
                fieldInfoBuilder
                        .add(column.getName(), SqlTypeName.CHAR, charType.getLength())
                        .nullable(charType.isNullable());
                break;
            case VARCHAR:
                VarCharType varCharType = (VarCharType) column.getType();
                fieldInfoBuilder
                        .add(column.getName(), SqlTypeName.VARCHAR, varCharType.getLength())
                        .nullable(varCharType.isNullable());
                break;
            case BINARY:
                BinaryType binaryType = (BinaryType) column.getType();
                fieldInfoBuilder
                        .add(column.getName(), SqlTypeName.BINARY, binaryType.getLength())
                        .nullable(binaryType.isNullable());
                break;
            case VARBINARY:
                VarBinaryType varBinaryType = (VarBinaryType) column.getType();
                fieldInfoBuilder
                        .add(column.getName(), SqlTypeName.VARBINARY, varBinaryType.getLength())
                        .nullable(varBinaryType.isNullable());
                break;
            case DECIMAL:
                DecimalType decimalType = (DecimalType) column.getType();
                fieldInfoBuilder
                        .add(
                                column.getName(),
                                SqlTypeName.DECIMAL,
                                decimalType.getPrecision(),
                                decimalType.getScale())
                        .nullable(decimalType.isNullable());
                break;
            case ROW:
                // Recursively map each field of the nested row.
                List<RelDataType> dataTypes =
                        ((RowType) column.getType())
                                .getFieldTypes().stream()
                                        .map((type) -> convertCalciteType(typeFactory, type))
                                        .collect(Collectors.toList());
                fieldInfoBuilder
                        .add(
                                column.getName(),
                                typeFactory.createStructType(
                                        dataTypes,
                                        ((RowType) column.getType()).getFieldNames()))
                        .nullable(true);
                break;
            case ARRAY:
                DataType elementType = ((ArrayType) column.getType()).getElementType();
                fieldInfoBuilder
                        .add(
                                column.getName(),
                                typeFactory.createArrayType(
                                        convertCalciteType(typeFactory, elementType), -1))
                        .nullable(true);
                break;
            case MAP:
                RelDataType keyType =
                        convertCalciteType(
                                typeFactory, ((MapType) column.getType()).getKeyType());
                RelDataType valueType =
                        convertCalciteType(
                                typeFactory, ((MapType) column.getType()).getValueType());
                fieldInfoBuilder
                        .add(column.getName(), typeFactory.createMapType(keyType, valueType))
                        .nullable(true);
                break;
            default:
                throw new UnsupportedOperationException(
                        "Unsupported type: " + column.getType());
        }
    }
    return fieldInfoBuilder.build();
}
/**
 * Converts a single CDC {@link DataType} into the corresponding Calcite
 * {@link RelDataType}, carrying over length/precision/scale where the SQL
 * type has them. ROW, ARRAY and MAP are handled recursively.
 *
 * @throws UnsupportedOperationException for zoned timestamps (not yet
 *         representable, see FLINK-37123) and for unmapped type roots
 */
public static RelDataType convertCalciteType(
        RelDataTypeFactory typeFactory, DataType dataType) {
    switch (dataType.getTypeRoot()) {
        case BOOLEAN:
            return typeFactory.createSqlType(SqlTypeName.BOOLEAN);
        case TINYINT:
            return typeFactory.createSqlType(SqlTypeName.TINYINT);
        case SMALLINT:
            return typeFactory.createSqlType(SqlTypeName.SMALLINT);
        case INTEGER:
            return typeFactory.createSqlType(SqlTypeName.INTEGER);
        case BIGINT:
            return typeFactory.createSqlType(SqlTypeName.BIGINT);
        case DATE:
            return typeFactory.createSqlType(SqlTypeName.DATE);
        case TIME_WITHOUT_TIME_ZONE:
            TimeType timeType = (TimeType) dataType;
            return typeFactory.createSqlType(SqlTypeName.TIME, timeType.getPrecision());
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            TimestampType timestampType = (TimestampType) dataType;
            return typeFactory.createSqlType(
                    SqlTypeName.TIMESTAMP, timestampType.getPrecision());
        case TIMESTAMP_WITH_TIME_ZONE:
            // TODO: Bump Calcite to support its TIMESTAMP_TZ type via #FLINK-37123
            throw new UnsupportedOperationException("Unsupported type: TIMESTAMP_TZ");
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            LocalZonedTimestampType localZonedTimestampType =
                    (LocalZonedTimestampType) dataType;
            return typeFactory.createSqlType(
                    SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE,
                    localZonedTimestampType.getPrecision());
        case FLOAT:
            return typeFactory.createSqlType(SqlTypeName.FLOAT);
        case DOUBLE:
            return typeFactory.createSqlType(SqlTypeName.DOUBLE);
        case CHAR:
            CharType charType = (CharType) dataType;
            return typeFactory.createSqlType(SqlTypeName.CHAR, charType.getLength());
        case VARCHAR:
            VarCharType varCharType = (VarCharType) dataType;
            return typeFactory.createSqlType(SqlTypeName.VARCHAR, varCharType.getLength());
        case BINARY:
            BinaryType binaryType = (BinaryType) dataType;
            return typeFactory.createSqlType(SqlTypeName.BINARY, binaryType.getLength());
        case VARBINARY:
            VarBinaryType varBinaryType = (VarBinaryType) dataType;
            return typeFactory.createSqlType(SqlTypeName.VARBINARY, varBinaryType.getLength());
        case DECIMAL:
            DecimalType decimalType = (DecimalType) dataType;
            return typeFactory.createSqlType(
                    SqlTypeName.DECIMAL, decimalType.getPrecision(), decimalType.getScale());
        case ROW:
            List<RelDataType> dataTypes =
                    ((RowType) dataType)
                            .getFieldTypes().stream()
                                    .map((type) -> convertCalciteType(typeFactory, type))
                                    .collect(Collectors.toList());
            return typeFactory.createStructType(
                    dataTypes, ((RowType) dataType).getFieldNames());
        case ARRAY:
            DataType elementType = ((ArrayType) dataType).getElementType();
            return typeFactory.createArrayType(
                    convertCalciteType(typeFactory, elementType), -1);
        case MAP:
            RelDataType keyType =
                    convertCalciteType(typeFactory, ((MapType) dataType).getKeyType());
            RelDataType valueType =
                    convertCalciteType(typeFactory, ((MapType) dataType).getValueType());
            return typeFactory.createMapType(keyType, valueType);
        default:
            throw new UnsupportedOperationException("Unsupported type: " + dataType);
    }
}
/**
 * Converts a Calcite {@link RelDataType} back into a CDC {@link DataType}.
 *
 * NOTE(review): CHAR/VARCHAR collapse to STRING (the declared length is
 * dropped), BINARY/VARBINARY take their length from getPrecision() — which
 * appears to rely on Calcite storing length in the precision slot; confirm.
 * Nullability of the Calcite type is not carried over, and ROW falls
 * through to the unsupported branch.
 *
 * @throws UnsupportedOperationException for ROW and any unmapped SQL type
 */
public static DataType convertCalciteRelDataTypeToDataType(RelDataType relDataType) {
    switch (relDataType.getSqlTypeName()) {
        case BOOLEAN:
            return DataTypes.BOOLEAN();
        case TINYINT:
            return DataTypes.TINYINT();
        case SMALLINT:
            return DataTypes.SMALLINT();
        case INTEGER:
            return DataTypes.INT();
        case BIGINT:
            return DataTypes.BIGINT();
        case DATE:
            return DataTypes.DATE();
        case TIME:
            return DataTypes.TIME(relDataType.getPrecision());
        case TIMESTAMP:
            return DataTypes.TIMESTAMP(relDataType.getPrecision());
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            return DataTypes.TIMESTAMP_LTZ(relDataType.getPrecision());
        case FLOAT:
            return DataTypes.FLOAT();
        case DOUBLE:
            return DataTypes.DOUBLE();
        case CHAR:
        case VARCHAR:
            return DataTypes.STRING();
        case BINARY:
            return DataTypes.BINARY(relDataType.getPrecision());
        case VARBINARY:
            return DataTypes.VARBINARY(relDataType.getPrecision());
        case DECIMAL:
            return DataTypes.DECIMAL(relDataType.getPrecision(), relDataType.getScale());
        case ARRAY:
            RelDataType componentType = relDataType.getComponentType();
            return DataTypes.ARRAY(convertCalciteRelDataTypeToDataType(componentType));
        case MAP:
            RelDataType keyType = relDataType.getKeyType();
            RelDataType valueType = relDataType.getValueType();
            return DataTypes.MAP(
                    convertCalciteRelDataTypeToDataType(keyType),
                    convertCalciteRelDataTypeToDataType(valueType));
        case ROW:
        default:
            throw new UnsupportedOperationException(
                    "Unsupported type: " + relDataType.getSqlTypeName());
    }
}
    /**
     * Converts a raw value into the internal representation expected for the
     * given {@code DataType} (e.g. String/Object -> BinaryStringData,
     * BigDecimal -> DecimalData, List/array -> ArrayData). A null input is
     * passed through unchanged; ROW values are assumed to already be in
     * internal form and returned as-is.
     *
     * @param value the raw value, may be null
     * @param dataType the target type whose root selects the conversion
     * @return the internal representation of {@code value}
     * @throws UnsupportedOperationException if the type root is not handled
     */
    public static Object convert(Object value, DataType dataType) {
        if (value == null) {
            return null;
        }
        switch (dataType.getTypeRoot()) {
            case BOOLEAN:
                return convertToBoolean(value);
            case TINYINT:
                return convertToByte(value);
            case SMALLINT:
                return convertToShort(value);
            case INTEGER:
                return convertToInt(value);
            case BIGINT:
                return convertToLong(value);
            case DATE:
                return convertToDate(value);
            case TIME_WITHOUT_TIME_ZONE:
                return convertToTime(value);
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                return convertToTimestamp(value);
            case TIMESTAMP_WITH_TIME_ZONE:
                return convertToZonedTimestampData(value);
            case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
                return convertToLocalTimeZoneTimestamp(value);
            case FLOAT:
                return convertToFloat(value);
            case DOUBLE:
                return convertToDouble(value);
            case CHAR:
            case VARCHAR:
                // Internal string form (BinaryStringData), unlike convertToOriginal.
                return convertToString(value);
            case BINARY:
            case VARBINARY:
                return convertToBinary(value);
            case DECIMAL:
                return convertToDecimal(value);
            case ROW:
                return value;
            case ARRAY:
                return convertToArray(value, (ArrayType) dataType);
            case MAP:
                return convertToMap(value, (MapType) dataType);
            default:
                throw new UnsupportedOperationException("Unsupported type: " + dataType);
        }
    }
    /**
     * Converts a value into "original" (plain Java) form for the given type:
     * CHAR/VARCHAR come back as {@code java.lang.String}, and ARRAY/MAP are
     * unwrapped to {@code Object[]} / {@code java.util.Map}. All remaining
     * type roots share the same conversions as {@link #convert}. A null input
     * is passed through unchanged.
     *
     * @param value the value to unwrap, may be null
     * @param dataType the type whose root selects the conversion
     * @return the plain-Java representation of {@code value}
     * @throws UnsupportedOperationException if the type root is not handled
     */
    public static Object convertToOriginal(Object value, DataType dataType) {
        if (value == null) {
            return null;
        }
        switch (dataType.getTypeRoot()) {
            case BOOLEAN:
                return convertToBoolean(value);
            case TINYINT:
                return convertToByte(value);
            case SMALLINT:
                return convertToShort(value);
            case INTEGER:
                return convertToInt(value);
            case BIGINT:
                return convertToLong(value);
            case DATE:
                return convertToDate(value);
            case TIME_WITHOUT_TIME_ZONE:
                return convertToTime(value);
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                return convertToTimestamp(value);
            case TIMESTAMP_WITH_TIME_ZONE:
                return convertToZonedTimestampData(value);
            case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
                return convertToLocalTimeZoneTimestamp(value);
            case FLOAT:
                return convertToFloat(value);
            case DOUBLE:
                return convertToDouble(value);
            case CHAR:
            case VARCHAR:
                // Plain java.lang.String, unlike convert() which yields BinaryStringData.
                return convertToStringOriginal(value);
            case BINARY:
            case VARBINARY:
                return convertToBinary(value);
            case DECIMAL:
                return convertToDecimal(value);
            case ROW:
                return value;
            case ARRAY:
                return convertToArrayOriginal(value, (ArrayType) dataType);
            case MAP:
                return convertToMapOriginal(value, (MapType) dataType);
            default:
                throw new UnsupportedOperationException("Unsupported type: " + dataType);
        }
    }
private static Object convertToBoolean(Object obj) {
if (obj instanceof Boolean) {
return obj;
} else if (obj instanceof Byte) {
return (byte) obj == 1;
} else if (obj instanceof Short) {
return (short) obj == 1;
} else {
return Boolean.parseBoolean(obj.toString());
}
}
private static Object convertToByte(Object obj) {
return Byte.parseByte(obj.toString());
}
private static Object convertToShort(Object obj) {
return Short.parseShort(obj.toString());
}
private static Object convertToInt(Object obj) {
if (obj instanceof Integer) {
return obj;
} else if (obj instanceof Long) {
return ((Long) obj).intValue();
} else {
return Integer.parseInt(obj.toString());
}
}
private static Object convertToLong(Object obj) {
if (obj instanceof Integer) {
return ((Integer) obj).longValue();
} else if (obj instanceof Long) {
return obj;
} else {
return Long.parseLong(obj.toString());
}
}
private static Object convertToFloat(Object obj) {
if (obj instanceof Float) {
return obj;
} else if (obj instanceof Double) {
return ((Double) obj).floatValue();
} else {
return Float.parseFloat(obj.toString());
}
}
private static Object convertToDouble(Object obj) {
if (obj instanceof Float) {
return ((Float) obj).doubleValue();
} else if (obj instanceof Double) {
return obj;
} else {
return Double.parseDouble(obj.toString());
}
}
private static DateData convertToDate(Object obj) {
if (obj instanceof DateData) {
return (DateData) obj;
}
return DateData.fromLocalDate(toLocalDate(obj));
}
private static LocalDate toLocalDate(Object obj) {
if (obj == null) {
return null;
}
if (obj instanceof LocalDate) {
return (LocalDate) obj;
}
if (obj instanceof LocalDateTime) {
return ((LocalDateTime) obj).toLocalDate();
}
if (obj instanceof java.sql.Date) {
return ((java.sql.Date) obj).toLocalDate();
}
if (obj instanceof java.sql.Time) {
throw new IllegalArgumentException(
"Unable to convert to LocalDate from a java.sql.Time value '" + obj + "'");
}
if (obj instanceof java.util.Date) {
java.util.Date date = (java.util.Date) obj;
return LocalDate.of(date.getYear() + 1900, date.getMonth() + 1, date.getDate());
}
if (obj instanceof Long) {
// Assume the value is the epoch day number
return LocalDate.ofEpochDay((Long) obj);
}
if (obj instanceof Integer) {
// Assume the value is the epoch day number
return LocalDate.ofEpochDay((Integer) obj);
}
throw new IllegalArgumentException(
"Unable to convert to LocalDate from unexpected value '"
+ obj
+ "' of type "
+ obj.getClass().getName());
}
private static TimeData convertToTime(Object obj) {
if (obj instanceof TimeData) {
return (TimeData) obj;
}
return TimeData.fromLocalTime(toLocalTime(obj));
}
private static Object convertToArray(Object obj, ArrayType arrayType) {
if (obj instanceof ArrayData) {
return obj;
}
if (obj instanceof List) {
List<?> list = (List<?>) obj;
GenericArrayData arrayData = new GenericArrayData(list.toArray());
return arrayData;
}
if (obj.getClass().isArray()) {
return new GenericArrayData((Object[]) obj);
}
throw new IllegalArgumentException("Unable to convert to ArrayData: " + obj);
}
private static Object convertToArrayOriginal(Object obj, ArrayType arrayType) {
if (obj instanceof ArrayData) {
ArrayData arrayData = (ArrayData) obj;
Object[] result = new Object[arrayData.size()];
for (int i = 0; i < arrayData.size(); i++) {
result[i] = getArrayElement(arrayData, i, arrayType.getElementType());
}
return result;
}
return obj;
}
    /**
     * Reads element {@code pos} from {@code arrayData} using the typed
     * accessor that matches {@code elementType}; nested arrays/maps are
     * recursively unwrapped to their original Java form.
     *
     * <p>DATE and TIME_WITHOUT_TIME_ZONE elements are read via
     * {@code getInt(pos)}, i.e. returned in their raw int encoding.
     *
     * @throws UnsupportedOperationException for element types with no accessor
     */
    private static Object getArrayElement(ArrayData arrayData, int pos, DataType elementType) {
        switch (elementType.getTypeRoot()) {
            case BOOLEAN:
                return arrayData.getBoolean(pos);
            case TINYINT:
                return arrayData.getByte(pos);
            case SMALLINT:
                return arrayData.getShort(pos);
            case INTEGER:
                return arrayData.getInt(pos);
            case BIGINT:
                return arrayData.getLong(pos);
            case FLOAT:
                return arrayData.getFloat(pos);
            case DOUBLE:
                return arrayData.getDouble(pos);
            case CHAR:
            case VARCHAR:
                return arrayData.getString(pos);
            case DECIMAL:
                // Decimal access needs the declared precision/scale.
                return arrayData.getDecimal(
                        pos,
                        ((DecimalType) elementType).getPrecision(),
                        ((DecimalType) elementType).getScale());
            case DATE:
                return arrayData.getInt(pos);
            case TIME_WITHOUT_TIME_ZONE:
                return arrayData.getInt(pos);
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                return arrayData.getTimestamp(pos, ((TimestampType) elementType).getPrecision());
            case ARRAY:
                return convertToArrayOriginal(arrayData.getArray(pos), (ArrayType) elementType);
            case MAP:
                return convertToMapOriginal(arrayData.getMap(pos), (MapType) elementType);
            default:
                throw new UnsupportedOperationException(
                        "Unsupported array element type: " + elementType);
        }
    }
private static Object convertToMap(Object obj, MapType mapType) {
if (obj instanceof MapData) {
return obj;
}
if (obj instanceof Map) {
Map<?, ?> javaMap = (Map<?, ?>) obj;
GenericMapData mapData = new GenericMapData(javaMap);
return mapData;
}
throw new IllegalArgumentException("Unable to convert to MapData: " + obj);
}
private static Object convertToMapOriginal(Object obj, MapType mapType) {
if (obj instanceof MapData) {
MapData mapData = (MapData) obj;
Map<Object, Object> result = new HashMap<>();
ArrayData keyArray = mapData.keyArray();
ArrayData valueArray = mapData.valueArray();
for (int i = 0; i < mapData.size(); i++) {
Object key = getArrayElement(keyArray, i, mapType.getKeyType());
Object value = getArrayElement(valueArray, i, mapType.getValueType());
result.put(key, value);
}
return result;
}
return obj;
}
private static LocalTime toLocalTime(Object obj) {
if (obj == null) {
return null;
}
if (obj instanceof LocalTime) {
return (LocalTime) obj;
}
if (obj instanceof LocalDateTime) {
return ((LocalDateTime) obj).toLocalTime();
}
if (obj instanceof java.sql.Date) {
throw new IllegalArgumentException(
"Unable to convert to LocalDate from a java.sql.Date value '" + obj + "'");
}
if (obj instanceof java.sql.Time) {
java.sql.Time time = (java.sql.Time) obj;
long millis = (int) (time.getTime() % MILLISECONDS_PER_SECOND);
int nanosOfSecond = (int) (millis * NANOSECONDS_PER_MILLISECOND);
return LocalTime.of(
time.getHours(), time.getMinutes(), time.getSeconds(), nanosOfSecond);
}
if (obj instanceof java.sql.Timestamp) {
java.sql.Timestamp timestamp = (java.sql.Timestamp) obj;
return LocalTime.of(
timestamp.getHours(),
timestamp.getMinutes(),
timestamp.getSeconds(),
timestamp.getNanos());
}
if (obj instanceof java.util.Date) {
java.util.Date date = (java.util.Date) obj;
long millis = (int) (date.getTime() % MILLISECONDS_PER_SECOND);
int nanosOfSecond = (int) (millis * NANOSECONDS_PER_MILLISECOND);
return LocalTime.of(
date.getHours(), date.getMinutes(), date.getSeconds(), nanosOfSecond);
}
if (obj instanceof Duration) {
Long value = ((Duration) obj).toNanos();
if (value >= 0 && value <= NANOSECONDS_PER_DAY) {
return LocalTime.ofNanoOfDay(value);
} else {
throw new IllegalArgumentException(
"Time values must use number of milliseconds greater than 0 and less than 86400000000000");
}
}
throw new IllegalArgumentException(
"Unable to convert to LocalTime from unexpected value '"
+ obj
+ "' of type "
+ obj.getClass().getName());
}
private static Object convertToTimestamp(Object obj) {
if (obj instanceof Long) {
return TimestampData.fromMillis((Long) obj);
} else if (obj instanceof Timestamp) {
return TimestampData.fromTimestamp((Timestamp) obj);
} else if (obj instanceof TimestampData) {
return obj;
}
throw new IllegalArgumentException(
"Unable to convert to TIMESTAMP from unexpected value '"
+ obj
+ "' of type "
+ obj.getClass().getName());
}
private static Object convertToZonedTimestampData(Object obj) {
if (obj instanceof ZonedTimestampData) {
return obj;
}
throw new IllegalArgumentException(
"Unable to convert to TIMESTAMP_TZ from unexpected value '"
+ obj
+ "' of type "
+ obj.getClass().getName());
}
private static Object convertToLocalTimeZoneTimestamp(Object obj) {
if (obj instanceof String) {
String str = (String) obj;
// TIMESTAMP_LTZ type is encoded in string type
Instant instant = Instant.parse(str);
return LocalZonedTimestampData.fromInstant(instant);
} else if (obj instanceof Long) {
return LocalZonedTimestampData.fromEpochMillis((Long) obj);
} else if (obj instanceof LocalZonedTimestampData) {
return obj;
}
throw new IllegalArgumentException(
"Unable to convert to TIMESTAMP_LTZ from unexpected value '"
+ obj
+ "' of type "
+ obj.getClass().getName());
}
    /**
     * Wraps a value's string form as internal {@code BinaryStringData}.
     * Callers guarantee non-null input (see {@link #convert}).
     */
    private static Object convertToString(Object obj) {
        return BinaryStringData.fromString(obj.toString());
    }
    /**
     * Returns the plain {@code java.lang.String} form of a value
     * (the "original" counterpart to {@link #convertToString}).
     */
    private static Object convertToStringOriginal(Object obj) {
        return String.valueOf(obj);
    }
private static Object convertToBinary(Object obj) {
if (obj instanceof byte[]) {
return obj;
} else if (obj instanceof ByteBuffer) {
ByteBuffer byteBuffer = (ByteBuffer) obj;
byte[] bytes = new byte[byteBuffer.remaining()];
byteBuffer.get(bytes);
return bytes;
} else {
throw new UnsupportedOperationException(
"Unsupported BYTES value type: " + obj.getClass().getSimpleName());
}
}
// convert to DecimalData
private static Object convertToDecimal(Object obj) {
if (obj instanceof BigDecimal) {
BigDecimal bigDecimalValue = (BigDecimal) obj;
return DecimalData.fromBigDecimal(
bigDecimalValue, bigDecimalValue.precision(), bigDecimalValue.scale());
} else if (obj instanceof DecimalData) {
return obj;
} else {
throw new UnsupportedOperationException(
"Unsupported Decimal value type: " + obj.getClass().getSimpleName());
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.drools.core.time.impl;
import java.io.Serializable;
import java.text.ParseException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.TreeSet;
/**
* Provides a parser and evaluator for unix-like cron expressions. Cron
* expressions provide the ability to specify complex time combinations such as
* "At 8:00am every Monday through Friday" or "At 1:30am every
* last Friday of the month".
* <P>
* Cron expressions are comprised of 6 required fields and one optional field
* separated by white space. The fields respectively are described as follows:
*
 * <table cellspacing="8">
 * <tr>
 * <th align="left">Field Name</th>
 * <th align="left"> </th>
 * <th align="left">Allowed Values</th>
 * <th align="left"> </th>
 * <th align="left">Allowed Special Characters</th>
 * </tr>
 * <tr>
 * <td align="left"><code>Seconds</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>0-59</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>, - * /</code></td>
 * </tr>
 * <tr>
 * <td align="left"><code>Minutes</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>0-59</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>, - * /</code></td>
 * </tr>
 * <tr>
 * <td align="left"><code>Hours</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>0-23</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>, - * /</code></td>
 * </tr>
 * <tr>
 * <td align="left"><code>Day-of-month</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>1-31</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>, - * ? / L W</code></td>
 * </tr>
 * <tr>
 * <td align="left"><code>Month</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>1-12 or JAN-DEC</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>, - * /</code></td>
 * </tr>
 * <tr>
 * <td align="left"><code>Day-of-Week</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>1-7 or SUN-SAT</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>, - * ? / L #</code></td>
 * </tr>
 * <tr>
 * <td align="left"><code>Year (Optional)</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>empty, 1970-2199</code></td>
 * <td align="left"> </td>
 * <td align="left"><code>, - * /</code></td>
 * </tr>
 * </table>
* <P>
* The '*' character is used to specify all values. For example, "*"
* in the minute field means "every minute".
* <P>
* The '?' character is allowed for the day-of-month and day-of-week fields. It
* is used to specify 'no specific value'. This is useful when you need to
* specify something in one of the two fields, but not the other.
* <P>
* The '-' character is used to specify ranges For example "10-12" in
* the hour field means "the hours 10, 11 and 12".
* <P>
* The ',' character is used to specify additional values. For example
* "MON,WED,FRI" in the day-of-week field means "the days Monday,
* Wednesday, and Friday".
* <P>
* The '/' character is used to specify increments. For example "0/15"
* in the seconds field means "the seconds 0, 15, 30, and 45". And
* "5/15" in the seconds field means "the seconds 5, 20, 35, and
* 50". Specifying '*' before the '/' is equivalent to specifying 0 is
* the value to start with. Essentially, for each field in the expression, there
* is a set of numbers that can be turned on or off. For seconds and minutes,
* the numbers range from 0 to 59. For hours 0 to 23, for days of the month 0 to
* 31, and for months 1 to 12. The "/" character simply helps you turn
* on every "nth" value in the given set. Thus "7/6" in the
* month field only turns on month "7", it does NOT mean every 6th
* month, please note that subtlety.
* <P>
* The 'L' character is allowed for the day-of-month and day-of-week fields.
* This character is short-hand for "last", but it has different
* meaning in each of the two fields. For example, the value "L" in
* the day-of-month field means "the last day of the month" - day 31
* for January, day 28 for February on non-leap years. If used in the
* day-of-week field by itself, it simply means "7" or
* "SAT". But if used in the day-of-week field after another value, it
* means "the last xxx day of the month" - for example "6L"
* means "the last friday of the month". When using the 'L' option, it
* is important not to specify lists, or ranges of values, as you'll get
* confusing results.
* <P>
* The 'W' character is allowed for the day-of-month field. This character
* is used to specify the weekday (Monday-Friday) nearest the given day. As an
* example, if you were to specify "15W" as the value for the
* day-of-month field, the meaning is: "the nearest weekday to the 15th of
* the month". So if the 15th is a Saturday, the trigger will fire on
* Friday the 14th. If the 15th is a Sunday, the trigger will fire on Monday the
* 16th. If the 15th is a Tuesday, then it will fire on Tuesday the 15th.
* However if you specify "1W" as the value for day-of-month, and the
* 1st is a Saturday, the trigger will fire on Monday the 3rd, as it will not
* 'jump' over the boundary of a month's days. The 'W' character can only be
* specified when the day-of-month is a single day, not a range or list of days.
* <P>
* The 'L' and 'W' characters can also be combined for the day-of-month
* expression to yield 'LW', which translates to "last weekday of the
* month".
* <P>
* The '#' character is allowed for the day-of-week field. This character is
* used to specify "the nth" XXX day of the month. For example, the
* value of "6#3" in the day-of-week field means the third Friday of
* the month (day 6 = Friday and "#3" = the 3rd one in the month).
* Other examples: "2#1" = the first Monday of the month and
* "4#5" = the fifth Wednesday of the month. Note that if you specify
* "#5" and there is not 5 of the given day-of-week in the month, then
* no firing will occur that month. If the '#' character is used, there can
* only be one expression in the day-of-week field ("3#1,6#3" is
* not valid, since there are two expressions).
* <P>
* <!--The 'C' character is allowed for the day-of-month and day-of-week fields.
* This character is short-hand for "calendar". This means values are
* calculated against the associated calendar, if any. If no calendar is
* associated, then it is equivalent to having an all-inclusive calendar. A
* value of "5C" in the day-of-month field means "the first day included by the
* calendar on or after the 5th". A value of "1C" in the day-of-week field
* means "the first day included by the calendar on or after sunday".-->
* <P>
* The legal characters and the names of months and days of the week are not
* case sensitive.
*
* <p>
* <b>NOTES:</b>
* <ul>
* <li>Support for specifying both a day-of-week and a day-of-month value is
* not complete (you'll need to use the '?' character in one of these fields).
* </li>
* <li>Overflowing ranges is supported - that is, having a larger number on
* the left hand side than the right. You might do 22-2 to catch 10 o'clock
* at night until 2 o'clock in the morning, or you might have NOV-FEB. It is
* very important to note that overuse of overflowing ranges creates ranges
* that don't make sense and no effort has been made to determine which
* interpretation CronExpression chooses. An example would be
* "0 0 14-6 ? * FRI-MON". </li>
* </ul>
* </p>
*/
public class KieCronExpression implements Serializable {
    private static final long serialVersionUID = 510L;
    // Search for a next valid time gives up beyond this year.
    // NOTE(review): the class Javadoc says years run to 2199 — confirm which is right.
    protected static final int YEAR_TO_GIVEUP_SCHEDULING_AT = 2299;
    // Field indices, in the order the fields appear in a cron expression.
    protected static final int SECOND = 0;
    protected static final int MINUTE = 1;
    protected static final int HOUR = 2;
    protected static final int DAY_OF_MONTH = 3;
    protected static final int MONTH = 4;
    protected static final int DAY_OF_WEEK = 5;
    protected static final int YEAR = 6;
    // Sentinel values stored in the per-field value sets.
    protected static final int ALL_SPEC_INT = 99; // '*'
    protected static final int NO_SPEC_INT = 98; // '?'
    protected static final Integer ALL_SPEC = ALL_SPEC_INT;
    protected static final Integer NO_SPEC = NO_SPEC_INT;
    // Lookup tables for three-letter names: months are 0-based (JAN=0),
    // days of week are 1-based (SUN=1).
    protected static final Map<String, Integer> monthMap = new HashMap<>(20);
    protected static final Map<String, Integer> dayMap = new HashMap<>(60);
    static {
        monthMap.put("JAN", 0);
        monthMap.put("FEB", 1);
        monthMap.put("MAR", 2);
        monthMap.put("APR", 3);
        monthMap.put("MAY", 4);
        monthMap.put("JUN", 5);
        monthMap.put("JUL", 6);
        monthMap.put("AUG", 7);
        monthMap.put("SEP", 8);
        monthMap.put("OCT", 9);
        monthMap.put("NOV", 10);
        monthMap.put("DEC", 11);
        dayMap.put("SUN", 1);
        dayMap.put("MON", 2);
        dayMap.put("TUE", 3);
        dayMap.put("WED", 4);
        dayMap.put("THU", 5);
        dayMap.put("FRI", 6);
        dayMap.put("SAT", 7);
    }
    // Original expression text (upper-cased by the constructor).
    protected String cronExpression;
    // Parsed per-field value sets and flags. All transient: they are derived
    // state, rebuilt by buildExpression() rather than serialized.
    protected transient TreeSet seconds;
    protected transient TreeSet minutes;
    protected transient TreeSet hours;
    protected transient TreeSet daysOfMonth;
    protected transient TreeSet months;
    protected transient TreeSet daysOfWeek;
    protected transient TreeSet years;
    // 'L' seen in the day-of-week field after a value (e.g. "6L").
    protected transient boolean lastdayOfWeek = false;
    // n of a "DOW#n" (nth weekday of month) expression; 0 when unused.
    protected transient int nthdayOfWeek = 0;
    // 'L' seen in the day-of-month field.
    protected transient boolean lastdayOfMonth = false;
    // 'W' (nearest weekday) seen in the day-of-month field.
    protected transient boolean nearestWeekday = false;
    protected transient boolean expressionParsed = false;
/**
* Constructs a new <CODE>CronExpression</CODE> based on the specified
* parameter.
* @param cronExpression String representation of the cron expression the
* new object should represent
* @throws ParseException if the string expression cannot be parsed into a valid
* <CODE>CronExpression</CODE>
*/
public KieCronExpression(String cronExpression) throws ParseException {
if (cronExpression == null) {
throw new IllegalArgumentException("cronExpression cannot be null");
}
this.cronExpression = cronExpression.toUpperCase(Locale.US);
buildExpression(this.cronExpression);
}
/**
* Returns the string representation of the <CODE>CronExpression</CODE>
* @return a string representation of the <CODE>CronExpression</CODE>
*/
public String toString() {
return cronExpression;
}
/**
* Indicates whether the specified cron expression can be parsed into a
* valid cron expression
* @param cronExpression the expression to evaluate
* @return a boolean indicating whether the given expression is a valid cron
* expression
*/
public static boolean isValidExpression(String cronExpression) {
try {
new KieCronExpression(cronExpression);
} catch (ParseException pe) {
return false;
}
return true;
}
////////////////////////////////////////////////////////////////////////////
//
// Expression Parsing Functions
//
////////////////////////////////////////////////////////////////////////////
    /**
     * Parses the whole cron expression, filling the per-field value sets and
     * flags. Fields are split on spaces/tabs in the order SECOND..YEAR; a
     * missing YEAR field defaults to "*". Any non-ParseException thrown while
     * parsing is wrapped in a ParseException.
     *
     * @param expression the (upper-cased) cron expression text
     * @throws ParseException if the expression is malformed or uses an
     *         unsupported combination (e.g. both day-of-week and day-of-month)
     */
    protected void buildExpression(String expression) throws ParseException {
        expressionParsed = true;
        try {
            // Lazily create the value sets (they are transient).
            if (seconds == null) {
                seconds = new TreeSet();
            }
            if (minutes == null) {
                minutes = new TreeSet();
            }
            if (hours == null) {
                hours = new TreeSet();
            }
            if (daysOfMonth == null) {
                daysOfMonth = new TreeSet();
            }
            if (months == null) {
                months = new TreeSet();
            }
            if (daysOfWeek == null) {
                daysOfWeek = new TreeSet();
            }
            if (years == null) {
                years = new TreeSet();
            }
            int exprOn = SECOND;
            Iterator<String> exprsTok = Arrays.asList(expression.split(" |\t")).iterator();
            while (exprsTok.hasNext() && exprOn <= YEAR) {
                String expr = exprsTok.next().trim();
                // throw an exception if L is used with other days of the month
                if (exprOn == DAY_OF_MONTH && expr.indexOf('L') != -1 && expr.length() > 1 && expr.contains(",")) {
                    throw new ParseException("Support for specifying 'L' and 'LW' with other days of the month is not implemented", -1);
                }
                // throw an exception if L is used with other days of the week
                if (exprOn == DAY_OF_WEEK && expr.indexOf('L') != -1 && expr.length() > 1 && expr.contains(",")) {
                    throw new ParseException("Support for specifying 'L' with other days of the week is not implemented", -1);
                }
                // Each field may be a comma-separated list of value tokens.
                Iterator<String> vTok = Arrays.asList(expr.split(",")).iterator();
                while (vTok.hasNext()) {
                    String v = vTok.next();
                    storeExpressionVals(0, v, exprOn);
                }
                exprOn++;
            }
            // Fewer than six fields (SECOND..DAY_OF_WEEK) is an error.
            if (exprOn <= DAY_OF_WEEK) {
                throw new ParseException("Unexpected end of expression.",
                        expression.length());
            }
            // The optional YEAR field defaults to every year.
            if (exprOn == YEAR) {
                storeExpressionVals(0, "*", YEAR);
            }
            TreeSet dow = getSet(DAY_OF_WEEK);
            TreeSet dom = getSet(DAY_OF_MONTH);
            // Copying the logic from the UnsupportedOperationException below
            boolean dayOfMSpec = !dom.contains(NO_SPEC);
            boolean dayOfWSpec = !dow.contains(NO_SPEC);
            // Exactly one of day-of-week / day-of-month must be '?'.
            if ((dayOfMSpec && dayOfWSpec) || (!dayOfMSpec && !dayOfWSpec)) {
                throw new ParseException(
                        "Support for specifying both or none of day-of-week AND a day-of-month parameters is not implemented.", 0);
            }
        } catch (ParseException pe) {
            throw pe;
        } catch (Exception e) {
            throw new ParseException("Illegal cron expression format ("
                    + e.toString() + ")", 0);
        }
    }
    /**
     * Parses one value token for the field identified by {@code type} (e.g.
     * "15", "MON-FRI", "0/5" written as 0 followed by '/' and 5, "L", "?"),
     * stores the resulting value(s) in the field's set, and returns the
     * position in {@code s} just after the consumed characters.
     *
     * @param pos position in {@code s} to start parsing at
     * @param s the token text
     * @param type field index (SECOND .. YEAR)
     * @return the next unparsed position in {@code s}
     * @throws ParseException if the token is not valid for the field
     */
    protected int storeExpressionVals(int pos, String s, int type)
            throws ParseException {
        int incr = 0;
        int i = skipWhiteSpace(pos, s);
        if (i >= s.length()) {
            return i;
        }
        char c = s.charAt(i);
        // Named values: three-letter month/day abbreviations (but not bare L/LW).
        if ((c >= 'A') && (c <= 'Z') && (!s.equals("L")) && (!s.equals("LW"))) {
            String sub = s.substring(i, i + 3);
            int sval;
            int eval = -1;
            if (type == MONTH) {
                // Months are stored 1-based, hence the +1 on the 0-based map value.
                sval = getMonthNumber(sub) + 1;
                if (sval <= 0) {
                    throw new ParseException("Invalid Month value: '" + sub + "'", i);
                }
                if (s.length() > i + 3) {
                    c = s.charAt(i + 3);
                    if (c == '-') {
                        // Range such as JAN-MAR.
                        i += 4;
                        sub = s.substring(i, i + 3);
                        eval = getMonthNumber(sub) + 1;
                        if (eval <= 0) {
                            throw new ParseException("Invalid Month value: '" + sub + "'", i);
                        }
                    }
                }
            } else if (type == DAY_OF_WEEK) {
                sval = getDayOfWeekNumber(sub);
                if (sval < 0) {
                    throw new ParseException("Invalid Day-of-Week value: '"
                            + sub + "'", i);
                }
                if (s.length() > i + 3) {
                    c = s.charAt(i + 3);
                    if (c == '-') {
                        // Range such as MON-FRI.
                        i += 4;
                        sub = s.substring(i, i + 3);
                        eval = getDayOfWeekNumber(sub);
                        if (eval < 0) {
                            throw new ParseException(
                                    "Invalid Day-of-Week value: '" + sub
                                            + "'", i);
                        }
                    } else if (c == '#') {
                        // Nth such weekday of the month, e.g. FRI#3.
                        try {
                            i += 4;
                            nthdayOfWeek = Integer.parseInt(s.substring(i));
                            if (nthdayOfWeek < 1 || nthdayOfWeek > 5) {
                                throw new Exception();
                            }
                        } catch (Exception e) {
                            throw new ParseException(
                                    "A numeric value between 1 and 5 must follow the '#' option",
                                    i);
                        }
                    } else if (c == 'L') {
                        // Last such weekday of the month, e.g. FRIL.
                        lastdayOfWeek = true;
                        i++;
                    }
                }
            } else {
                throw new ParseException(
                        "Illegal characters for this position: '" + sub + "'",
                        i);
            }
            if (eval != -1) {
                incr = 1;
            }
            addToSet(sval, eval, incr, type);
            return (i + 3);
        }
        if (c == '?') {
            i++;
            if ((i + 1) < s.length()
                    && (s.charAt(i) != ' ' && s.charAt(i + 1) != '\t')) {
                throw new ParseException("Illegal character after '?': "
                        + s.charAt(i), i);
            }
            if (type != DAY_OF_WEEK && type != DAY_OF_MONTH) {
                throw new ParseException(
                        "'?' can only be specfied for Day-of-Month or Day-of-Week.",
                        i);
            }
            // '?' in day-of-week is only allowed if day-of-month is specified.
            if (type == DAY_OF_WEEK && !lastdayOfMonth) {
                int val = (Integer) daysOfMonth.last();
                if (val == NO_SPEC_INT) {
                    throw new ParseException(
                            "'?' can only be specfied for Day-of-Month -OR- Day-of-Week.",
                            i);
                }
            }
            addToSet(NO_SPEC_INT, -1, 0, type);
            return i;
        }
        if (c == '*' || c == '/') {
            if (c == '*' && (i + 1) >= s.length()) {
                // Bare '*': every value for this field.
                addToSet(ALL_SPEC_INT, -1, incr, type);
                return i + 1;
            } else if (c == '/'
                    && ((i + 1) >= s.length() || s.charAt(i + 1) == ' ' || s
                            .charAt(i + 1) == '\t')) {
                throw new ParseException("'/' must be followed by an integer.", i);
            } else if (c == '*') {
                i++;
            }
            c = s.charAt(i);
            if (c == '/') { // is an increment specified?
                i++;
                if (i >= s.length()) {
                    throw new ParseException("Unexpected end of string.", i);
                }
                incr = getNumericValue(s, i);
                i++;
                if (incr > 10) {
                    // Two-digit increment: skip over its second digit.
                    i++;
                }
                // Range-check the increment against the field's value space.
                if (incr > 59 && (type == SECOND || type == MINUTE)) {
                    throw new ParseException("Increment > 60 : " + incr, i);
                } else if (incr > 23 && (type == HOUR)) {
                    throw new ParseException("Increment > 24 : " + incr, i);
                } else if (incr > 31 && (type == DAY_OF_MONTH)) {
                    throw new ParseException("Increment > 31 : " + incr, i);
                } else if (incr > 7 && (type == DAY_OF_WEEK)) {
                    throw new ParseException("Increment > 7 : " + incr, i);
                } else if (incr > 12 && (type == MONTH)) {
                    throw new ParseException("Increment > 12 : " + incr, i);
                }
            } else {
                incr = 1;
            }
            addToSet(ALL_SPEC_INT, -1, incr, type);
            return i;
        } else if (c == 'L') {
            i++;
            if (type == DAY_OF_MONTH) {
                lastdayOfMonth = true;
            }
            if (type == DAY_OF_WEEK) {
                // Bare 'L' in day-of-week means Saturday (7).
                addToSet(7, 7, 0, type);
            }
            if (type == DAY_OF_MONTH && s.length() > i) {
                c = s.charAt(i);
                if (c == 'W') {
                    // 'LW' = last weekday of the month.
                    nearestWeekday = true;
                    i++;
                }
            }
            return i;
        } else if (c >= '0' && c <= '9') {
            int val = Integer.parseInt(String.valueOf(c));
            i++;
            if (i >= s.length()) {
                addToSet(val, -1, -1, type);
            } else {
                c = s.charAt(i);
                if (c >= '0' && c <= '9') {
                    // Multi-digit number: read the rest of it.
                    ValueSet vs = getValue(val, s, i);
                    val = vs.value;
                    i = vs.pos;
                }
                // Handle any suffix (-end, /incr, L, W, #n).
                i = checkNext(i, s, val, type);
                return i;
            }
        } else {
            throw new ParseException("Unexpected character: " + c, i);
        }
        return i;
    }
    /**
     * Continues parsing after a leading numeric value {@code val}: handles the
     * optional 'L', 'W', '#n', range ('-end', optionally with an increment)
     * and increment ('/incr') suffixes, stores the resulting value(s) in the
     * set for {@code type}, and returns the next unparsed position.
     *
     * @param pos position in {@code s} just after the numeric value
     * @param s the token text
     * @param val the numeric value already parsed
     * @param type field index (SECOND .. YEAR)
     * @return the next unparsed position in {@code s}
     * @throws ParseException if the suffix is not valid for the field
     */
    protected int checkNext(int pos, String s, int val, int type)
            throws ParseException {
        int end = -1;
        int i = pos;
        if (i >= s.length()) {
            addToSet(val, end, -1, type);
            return i;
        }
        char c = s.charAt(pos);
        if (c == 'L') {
            // e.g. "6L" = last such weekday of the month (day-of-week only).
            if (type == DAY_OF_WEEK) {
                lastdayOfWeek = true;
            } else {
                throw new ParseException("'L' option is not valid here. (pos=" + i + ")", i);
            }
            TreeSet set = getSet(type);
            set.add(val);
            i++;
            return i;
        }
        if (c == 'W') {
            // e.g. "15W" = nearest weekday to the 15th (day-of-month only).
            if (type == DAY_OF_MONTH) {
                nearestWeekday = true;
            } else {
                throw new ParseException("'W' option is not valid here. (pos=" + i + ")", i);
            }
            TreeSet set = getSet(type);
            set.add(val);
            i++;
            return i;
        }
        if (c == '#') {
            // e.g. "6#3" = third such weekday of the month (day-of-week only).
            if (type != DAY_OF_WEEK) {
                throw new ParseException("'#' option is not valid here. (pos=" + i + ")", i);
            }
            i++;
            try {
                nthdayOfWeek = Integer.parseInt(s.substring(i));
                if (nthdayOfWeek < 1 || nthdayOfWeek > 5) {
                    throw new Exception();
                }
            } catch (Exception e) {
                throw new ParseException(
                        "A numeric value between 1 and 5 must follow the '#' option",
                        i);
            }
            TreeSet set = getSet(type);
            set.add(val);
            i++;
            return i;
        }
        if (c == '-') {
            // Range "val-end", optionally followed by "/incr".
            i++;
            c = s.charAt(i);
            int v = Integer.parseInt(String.valueOf(c));
            end = v;
            i++;
            if (i >= s.length()) {
                addToSet(val, end, 1, type);
                return i;
            }
            c = s.charAt(i);
            if (c >= '0' && c <= '9') {
                // Multi-digit end value.
                ValueSet vs = getValue(v, s, i);
                int v1 = vs.value;
                end = v1;
                i = vs.pos;
            }
            if (i < s.length() && ((c = s.charAt(i)) == '/')) {
                // Increment on the range.
                i++;
                c = s.charAt(i);
                int v2 = Integer.parseInt(String.valueOf(c));
                i++;
                if (i >= s.length()) {
                    addToSet(val, end, v2, type);
                    return i;
                }
                c = s.charAt(i);
                if (c >= '0' && c <= '9') {
                    // Multi-digit increment.
                    ValueSet vs = getValue(v2, s, i);
                    int v3 = vs.value;
                    addToSet(val, end, v3, type);
                    i = vs.pos;
                    return i;
                } else {
                    addToSet(val, end, v2, type);
                    return i;
                }
            } else {
                // Plain range: implicit increment of 1.
                addToSet(val, end, 1, type);
                return i;
            }
        }
        if (c == '/') {
            // "val/incr" with no explicit end of range.
            i++;
            c = s.charAt(i);
            int v2 = Integer.parseInt(String.valueOf(c));
            i++;
            if (i >= s.length()) {
                addToSet(val, end, v2, type);
                return i;
            }
            c = s.charAt(i);
            if (c >= '0' && c <= '9') {
                // Multi-digit increment.
                ValueSet vs = getValue(v2, s, i);
                int v3 = vs.value;
                addToSet(val, end, v3, type);
                i = vs.pos;
                return i;
            } else {
                throw new ParseException("Unexpected character '" + c + "' after '/'", i);
            }
        }
        // No recognized suffix: store the single value.
        addToSet(val, end, 0, type);
        i++;
        return i;
    }
    /**
     * Returns the textual cron expression this object was built from
     * (upper-cased by the constructor).
     *
     * @return the cron expression string
     */
    public String getCronExpression() {
        return cronExpression;
    }
public String getExpressionSummary() {
StringBuilder buf = new StringBuilder();
buf.append("seconds: ");
buf.append(getExpressionSetSummary(seconds));
buf.append("\n");
buf.append("minutes: ");
buf.append(getExpressionSetSummary(minutes));
buf.append("\n");
buf.append("hours: ");
buf.append(getExpressionSetSummary(hours));
buf.append("\n");
buf.append("daysOfMonth: ");
buf.append(getExpressionSetSummary(daysOfMonth));
buf.append("\n");
buf.append("months: ");
buf.append(getExpressionSetSummary(months));
buf.append("\n");
buf.append("daysOfWeek: ");
buf.append(getExpressionSetSummary(daysOfWeek));
buf.append("\n");
buf.append("lastdayOfWeek: ");
buf.append(lastdayOfWeek);
buf.append("\n");
buf.append("nearestWeekday: ");
buf.append(nearestWeekday);
buf.append("\n");
buf.append("NthDayOfWeek: ");
buf.append(nthdayOfWeek);
buf.append("\n");
buf.append("lastdayOfMonth: ");
buf.append(lastdayOfMonth);
buf.append("\n");
buf.append("years: ");
buf.append(getExpressionSetSummary(years));
buf.append("\n");
return buf.toString();
}
/**
 * Renders a parsed field set as a summary token: "?" if the set carries the
 * no-specification marker, "*" if it carries the all-values marker, and
 * otherwise the contained integers joined with commas in iteration order.
 *
 * @param set the field's value set (elements are Integers plus optional
 *            NO_SPEC / ALL_SPEC markers)
 * @return the summary string for the set
 */
protected String getExpressionSetSummary(java.util.Set set) {
    if (set.contains(NO_SPEC)) {
        return "?";
    }
    if (set.contains(ALL_SPEC)) {
        return "*";
    }
    StringBuilder summary = new StringBuilder();
    for (Iterator values = set.iterator(); values.hasNext();) {
        Integer value = (Integer) values.next();
        // A non-empty buffer means a value was already emitted, so a
        // separator is needed before the next one.
        if (summary.length() > 0) {
            summary.append(",");
        }
        summary.append(value.toString());
    }
    return summary.toString();
}
/**
 * List overload of {@code getExpressionSetSummary}: "?" if the list carries
 * the no-specification marker, "*" if it carries the all-values marker, and
 * otherwise the contained integers joined with commas in list order.
 *
 * @param list the field's value list (elements are Integers plus optional
 *             NO_SPEC / ALL_SPEC markers)
 * @return the summary string for the list
 */
protected String getExpressionSetSummary(java.util.ArrayList list) {
    if (list.contains(NO_SPEC)) {
        return "?";
    }
    if (list.contains(ALL_SPEC)) {
        return "*";
    }
    StringBuilder summary = new StringBuilder();
    for (Iterator values = list.iterator(); values.hasNext();) {
        Integer value = (Integer) values.next();
        // A non-empty buffer means a value was already emitted, so a
        // separator is needed before the next one.
        if (summary.length() > 0) {
            summary.append(",");
        }
        summary.append(value.toString());
    }
    return summary.toString();
}
/**
 * Advances past any run of spaces and tabs starting at index {@code i}.
 *
 * @param i index to start scanning from
 * @param s the expression string being scanned
 * @return the index of the first character at or after {@code i} that is
 *         neither a space nor a tab, or {@code s.length()} if none remains
 */
protected int skipWhiteSpace(int i, String s) {
    while (i < s.length()) {
        char ch = s.charAt(i);
        if (ch != ' ' && ch != '\t') {
            break;
        }
        i++;
    }
    return i;
}
/**
 * Advances to the next space or tab at or after index {@code i}.
 *
 * Bug fix: the loop condition previously used
 * {@code s.charAt(i) != ' ' || s.charAt(i) != '\t'}, which is a tautology
 * (no character can equal both), so the scan always ran to the end of the
 * string and the method never actually stopped at whitespace. As a result
 * {@code getNumericValue} consumed the rest of the line and threw
 * NumberFormatException whenever a numeric token was followed by another
 * field. The condition must be a conjunction: keep scanning while the
 * current character is NEITHER a space NOR a tab. The fix is backward
 * compatible - tokens that end at the end of the string behave exactly as
 * before; tokens followed by whitespace now parse instead of failing.
 *
 * @param i index to start scanning from
 * @param s the expression string being scanned
 * @return the index of the next space or tab, or {@code s.length()} if the
 *         remainder of the string contains none
 */
protected int findNextWhiteSpace(int i, String s) {
    for (; i < s.length() && (s.charAt(i) != ' ' && s.charAt(i) != '\t'); i++) {
        ;
    }
    return i;
}
/**
 * Records the value or range described by (val, end, incr) into the set
 * backing the given cron field, after validating the numbers against that
 * field's legal bounds.
 *
 * {@code val} may also be ALL_SPEC_INT (the '*' wildcard) or NO_SPEC_INT
 * (the '?' no-specification marker); {@code end == -1} means "no explicit
 * range end was given"; {@code incr} of 0 or -1 means "a single value, not
 * a range".
 *
 * @param val  first value of the range, or a special marker (see above)
 * @param end  last value of the range, or -1 when none was given
 * @param incr step between generated values; 0 or -1 stores just {@code val}
 * @param type field identifier (SECOND, MINUTE, HOUR, DAY_OF_MONTH, MONTH,
 *             DAY_OF_WEEK or YEAR)
 * @throws ParseException if a value lies outside the field's legal range
 */
protected void addToSet(int val, int end, int incr, int type)
throws ParseException {
TreeSet set = getSet(type);
// Validate val/end against the bounds of the specific field type.
if (type == SECOND || type == MINUTE) {
if ((val < 0 || val > 59 || end > 59) && (val != ALL_SPEC_INT)) {
throw new ParseException(
"Minute and Second values must be between 0 and 59",
-1);
}
} else if (type == HOUR) {
if ((val < 0 || val > 23 || end > 23) && (val != ALL_SPEC_INT)) {
throw new ParseException(
"Hour values must be between 0 and 23", -1);
}
} else if (type == DAY_OF_MONTH) {
if ((val < 1 || val > 31 || end > 31) && (val != ALL_SPEC_INT)
&& (val != NO_SPEC_INT)) {
throw new ParseException(
"Day of month values must be between 1 and 31", -1);
}
} else if (type == MONTH) {
if ((val < 1 || val > 12 || end > 12) && (val != ALL_SPEC_INT)) {
throw new ParseException(
"Month values must be between 1 and 12", -1);
}
} else if (type == DAY_OF_WEEK) {
if ((val == 0 || val > 7 || end > 7) && (val != ALL_SPEC_INT)
&& (val != NO_SPEC_INT)) {
throw new ParseException(
"Day-of-Week values must be between 1 and 7", -1);
}
}
// No increment supplied: store the single value (or the NO_SPEC marker
// when val is -1, i.e. the field was given as '?').
if ((incr == 0 || incr == -1) && val != ALL_SPEC_INT) {
if (val != -1) {
set.add(val);
} else {
set.add(NO_SPEC);
}
return;
}
int startAt = val;
int stopAt = end;
if (val == ALL_SPEC_INT && incr <= 0) {
incr = 1;
set.add(ALL_SPEC); // put in a marker, but also fill values
}
// Substitute field-specific defaults for an open-ended ('-1') or
// wildcard start/end before expanding the range.
if (type == SECOND || type == MINUTE) {
if (stopAt == -1) {
stopAt = 59;
}
if (startAt == -1 || startAt == ALL_SPEC_INT) {
startAt = 0;
}
} else if (type == HOUR) {
if (stopAt == -1) {
stopAt = 23;
}
if (startAt == -1 || startAt == ALL_SPEC_INT) {
startAt = 0;
}
} else if (type == DAY_OF_MONTH) {
if (stopAt == -1) {
stopAt = 31;
}
if (startAt == -1 || startAt == ALL_SPEC_INT) {
startAt = 1;
}
} else if (type == MONTH) {
if (stopAt == -1) {
stopAt = 12;
}
if (startAt == -1 || startAt == ALL_SPEC_INT) {
startAt = 1;
}
} else if (type == DAY_OF_WEEK) {
if (stopAt == -1) {
stopAt = 7;
}
if (startAt == -1 || startAt == ALL_SPEC_INT) {
startAt = 1;
}
} else if (type == YEAR) {
if (stopAt == -1) {
stopAt = YEAR_TO_GIVEUP_SCHEDULING_AT;
}
if (startAt == -1 || startAt == ALL_SPEC_INT) {
// needs to start at 1969 because timezones can make dates before 1970
startAt = 1969;
}
}
// if the end of the range is before the start, then we need to overflow into
// the next day, month etc. This is done by adding the maximum amount for that
// type, and using modulus max to determine the value being added.
int max = -1;
if (stopAt < startAt) {
switch (type) {
case SECOND:
case MINUTE:
max = 60;
break;
case HOUR:
max = 24;
break;
case MONTH:
max = 12;
break;
case DAY_OF_WEEK:
max = 7;
break;
case DAY_OF_MONTH:
max = 31;
break;
case YEAR:
// years cannot wrap around; a reversed year range is a caller error
throw new IllegalArgumentException("Start year must be less than stop year");
default:
throw new IllegalArgumentException("Unexpected type encountered");
}
stopAt += max;
}
// Expand the (possibly wrapped) range into concrete values.
for (int i = startAt; i <= stopAt; i += incr) {
if (max == -1) {
// ie: there's no max to overflow over
set.add(i);
} else {
// take the modulus to get the real value
int i2 = i % max;
// 1-indexed ranges should not include 0, and should include their max
if (i2 == 0 && (type == MONTH || type == DAY_OF_WEEK || type == DAY_OF_MONTH)) {
i2 = max;
}
set.add(i2);
}
}
}
/**
 * Maps a field-type constant to the TreeSet that stores that field's
 * parsed values.
 *
 * @param type one of SECOND, MINUTE, HOUR, DAY_OF_MONTH, MONTH,
 *             DAY_OF_WEEK or YEAR
 * @return the backing set for the field, or {@code null} for an
 *         unrecognized type (callers are expected to pass valid types)
 */
protected TreeSet getSet(int type) {
    if (type == SECOND) {
        return seconds;
    }
    if (type == MINUTE) {
        return minutes;
    }
    if (type == HOUR) {
        return hours;
    }
    if (type == DAY_OF_MONTH) {
        return daysOfMonth;
    }
    if (type == MONTH) {
        return months;
    }
    if (type == DAY_OF_WEEK) {
        return daysOfWeek;
    }
    if (type == YEAR) {
        return years;
    }
    return null;
}
/**
 * Continues parsing a number whose first digit(s) {@code v} were already
 * consumed: appends any further digits found at index {@code i} onward and
 * returns the complete value together with the position just past it.
 *
 * Note: the first {@code charAt(i)} is unguarded, so (as before) a
 * StringIndexOutOfBoundsException results if {@code i == s.length()}.
 *
 * @param v digits already parsed (used as the prefix of the value)
 * @param s the expression string being scanned
 * @param i index of the next character to examine
 * @return a ValueSet holding the parsed integer and the follow-on position
 *         (one past the string's end when the digits ran to the end)
 */
protected ValueSet getValue( int v, String s, int i) {
    char ch = s.charAt(i);
    StringBuilder digits = new StringBuilder().append(v);
    while (ch >= '0' && ch <= '9') {
        digits.append(ch);
        i++;
        if (i >= s.length()) {
            break;
        }
        ch = s.charAt(i);
    }
    ValueSet result = new ValueSet();
    // When the scan stopped at the end of the string, pos points one past
    // the end - preserved from the original implementation.
    result.pos = (i < s.length()) ? i : i + 1;
    result.value = Integer.parseInt(digits.toString());
    return result;
}
/**
 * Parses the integer token that starts at index {@code i} and extends up to
 * the next whitespace boundary reported by {@link #findNextWhiteSpace}.
 *
 * @param s the expression string being scanned
 * @param i index of the token's first character
 * @return the parsed integer
 */
protected int getNumericValue(String s, int i) {
    int tokenEnd = findNextWhiteSpace(i, s);
    return Integer.parseInt(s.substring(i, tokenEnd));
}
/**
 * Looks up the numeric month for a month-name abbreviation via the
 * {@code monthMap} table.
 *
 * @param s the month name key (as stored in {@code monthMap})
 * @return the month number, or -1 when the name is not recognized
 */
protected int getMonthNumber(String s) {
    Integer number = monthMap.get(s);
    return (number == null) ? -1 : number;
}
/**
 * Looks up the numeric day-of-week for a day-name abbreviation via the
 * {@code dayMap} table.
 *
 * @param s the day name key (as stored in {@code dayMap})
 * @return the day-of-week number, or -1 when the name is not recognized
 */
protected int getDayOfWeekNumber(String s) {
    Integer number = dayMap.get(s);
    return (number == null) ? -1 : number;
}
/**
 * Simple result pair returned by {@code getValue}: the integer that was
 * parsed and the index of the character following it in the expression.
 */
public static class ValueSet {
// the parsed integer value
public int value;
// index into the expression string just past the parsed digits
public int pos;
}
}
|
googleapis/google-cloud-java | 36,135 | java-retail/proto-google-cloud-retail-v2beta/src/main/java/com/google/cloud/retail/v2beta/UpdateServingConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2beta/serving_config_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2beta;
/**
*
*
* <pre>
* Request for UpdateServingConfig method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2beta.UpdateServingConfigRequest}
*/
public final class UpdateServingConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2beta.UpdateServingConfigRequest)
UpdateServingConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateServingConfigRequest.newBuilder() to construct.
private UpdateServingConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateServingConfigRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateServingConfigRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2beta.ServingConfigServiceProto
.internal_static_google_cloud_retail_v2beta_UpdateServingConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2beta.ServingConfigServiceProto
.internal_static_google_cloud_retail_v2beta_UpdateServingConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2beta.UpdateServingConfigRequest.class,
com.google.cloud.retail.v2beta.UpdateServingConfigRequest.Builder.class);
}
private int bitField0_;
public static final int SERVING_CONFIG_FIELD_NUMBER = 1;
private com.google.cloud.retail.v2beta.ServingConfig servingConfig_;
/**
*
*
* <pre>
* Required. The ServingConfig to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the servingConfig field is set.
*/
@java.lang.Override
public boolean hasServingConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The ServingConfig to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The servingConfig.
*/
@java.lang.Override
public com.google.cloud.retail.v2beta.ServingConfig getServingConfig() {
return servingConfig_ == null
? com.google.cloud.retail.v2beta.ServingConfig.getDefaultInstance()
: servingConfig_;
}
/**
*
*
* <pre>
* Required. The ServingConfig to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.retail.v2beta.ServingConfigOrBuilder getServingConfigOrBuilder() {
return servingConfig_ == null
? com.google.cloud.retail.v2beta.ServingConfig.getDefaultInstance()
: servingConfig_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Indicates which fields in the provided
* [ServingConfig][google.cloud.retail.v2beta.ServingConfig] to update. The
* following are NOT supported:
*
* * [ServingConfig.name][google.cloud.retail.v2beta.ServingConfig.name]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [ServingConfig][google.cloud.retail.v2beta.ServingConfig] to update. The
* following are NOT supported:
*
* * [ServingConfig.name][google.cloud.retail.v2beta.ServingConfig.name]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [ServingConfig][google.cloud.retail.v2beta.ServingConfig] to update. The
* following are NOT supported:
*
* * [ServingConfig.name][google.cloud.retail.v2beta.ServingConfig.name]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getServingConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getServingConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.retail.v2beta.UpdateServingConfigRequest)) {
return super.equals(obj);
}
com.google.cloud.retail.v2beta.UpdateServingConfigRequest other =
(com.google.cloud.retail.v2beta.UpdateServingConfigRequest) obj;
if (hasServingConfig() != other.hasServingConfig()) return false;
if (hasServingConfig()) {
if (!getServingConfig().equals(other.getServingConfig())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasServingConfig()) {
hash = (37 * hash) + SERVING_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getServingConfig().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.retail.v2beta.UpdateServingConfigRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2beta.UpdateServingConfigRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.UpdateServingConfigRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2beta.UpdateServingConfigRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.UpdateServingConfigRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2beta.UpdateServingConfigRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.UpdateServingConfigRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2beta.UpdateServingConfigRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.UpdateServingConfigRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2beta.UpdateServingConfigRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.UpdateServingConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2beta.UpdateServingConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.retail.v2beta.UpdateServingConfigRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for UpdateServingConfig method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2beta.UpdateServingConfigRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2beta.UpdateServingConfigRequest)
com.google.cloud.retail.v2beta.UpdateServingConfigRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2beta.ServingConfigServiceProto
.internal_static_google_cloud_retail_v2beta_UpdateServingConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2beta.ServingConfigServiceProto
.internal_static_google_cloud_retail_v2beta_UpdateServingConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2beta.UpdateServingConfigRequest.class,
com.google.cloud.retail.v2beta.UpdateServingConfigRequest.Builder.class);
}
// Construct using com.google.cloud.retail.v2beta.UpdateServingConfigRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getServingConfigFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
servingConfig_ = null;
if (servingConfigBuilder_ != null) {
servingConfigBuilder_.dispose();
servingConfigBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.retail.v2beta.ServingConfigServiceProto
.internal_static_google_cloud_retail_v2beta_UpdateServingConfigRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.retail.v2beta.UpdateServingConfigRequest getDefaultInstanceForType() {
return com.google.cloud.retail.v2beta.UpdateServingConfigRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.retail.v2beta.UpdateServingConfigRequest build() {
com.google.cloud.retail.v2beta.UpdateServingConfigRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.retail.v2beta.UpdateServingConfigRequest buildPartial() {
com.google.cloud.retail.v2beta.UpdateServingConfigRequest result =
new com.google.cloud.retail.v2beta.UpdateServingConfigRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.retail.v2beta.UpdateServingConfigRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.servingConfig_ =
servingConfigBuilder_ == null ? servingConfig_ : servingConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.retail.v2beta.UpdateServingConfigRequest) {
return mergeFrom((com.google.cloud.retail.v2beta.UpdateServingConfigRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.retail.v2beta.UpdateServingConfigRequest other) {
if (other == com.google.cloud.retail.v2beta.UpdateServingConfigRequest.getDefaultInstance())
return this;
if (other.hasServingConfig()) {
mergeServingConfig(other.getServingConfig());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getServingConfigFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.retail.v2beta.ServingConfig servingConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.retail.v2beta.ServingConfig,
com.google.cloud.retail.v2beta.ServingConfig.Builder,
com.google.cloud.retail.v2beta.ServingConfigOrBuilder>
servingConfigBuilder_;
/**
*
*
* <pre>
* Required. The ServingConfig to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the servingConfig field is set.
*/
public boolean hasServingConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The ServingConfig to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The servingConfig.
*/
public com.google.cloud.retail.v2beta.ServingConfig getServingConfig() {
if (servingConfigBuilder_ == null) {
return servingConfig_ == null
? com.google.cloud.retail.v2beta.ServingConfig.getDefaultInstance()
: servingConfig_;
} else {
return servingConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The ServingConfig to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setServingConfig(com.google.cloud.retail.v2beta.ServingConfig value) {
if (servingConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
servingConfig_ = value;
} else {
servingConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ServingConfig to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setServingConfig(
com.google.cloud.retail.v2beta.ServingConfig.Builder builderForValue) {
if (servingConfigBuilder_ == null) {
servingConfig_ = builderForValue.build();
} else {
servingConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ServingConfig to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeServingConfig(com.google.cloud.retail.v2beta.ServingConfig value) {
if (servingConfigBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& servingConfig_ != null
&& servingConfig_
!= com.google.cloud.retail.v2beta.ServingConfig.getDefaultInstance()) {
getServingConfigBuilder().mergeFrom(value);
} else {
servingConfig_ = value;
}
} else {
servingConfigBuilder_.mergeFrom(value);
}
if (servingConfig_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The ServingConfig to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearServingConfig() {
bitField0_ = (bitField0_ & ~0x00000001);
servingConfig_ = null;
if (servingConfigBuilder_ != null) {
servingConfigBuilder_.dispose();
servingConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ServingConfig to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.retail.v2beta.ServingConfig.Builder getServingConfigBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getServingConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The ServingConfig to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.retail.v2beta.ServingConfigOrBuilder getServingConfigOrBuilder() {
if (servingConfigBuilder_ != null) {
return servingConfigBuilder_.getMessageOrBuilder();
} else {
return servingConfig_ == null
? com.google.cloud.retail.v2beta.ServingConfig.getDefaultInstance()
: servingConfig_;
}
}
/**
*
*
* <pre>
* Required. The ServingConfig to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.retail.v2beta.ServingConfig,
com.google.cloud.retail.v2beta.ServingConfig.Builder,
com.google.cloud.retail.v2beta.ServingConfigOrBuilder>
getServingConfigFieldBuilder() {
if (servingConfigBuilder_ == null) {
servingConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.retail.v2beta.ServingConfig,
com.google.cloud.retail.v2beta.ServingConfig.Builder,
com.google.cloud.retail.v2beta.ServingConfigOrBuilder>(
getServingConfig(), getParentForChildren(), isClean());
servingConfig_ = null;
}
return servingConfigBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Indicates which fields in the provided
* [ServingConfig][google.cloud.retail.v2beta.ServingConfig] to update. The
* following are NOT supported:
*
* * [ServingConfig.name][google.cloud.retail.v2beta.ServingConfig.name]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [ServingConfig][google.cloud.retail.v2beta.ServingConfig] to update. The
* following are NOT supported:
*
* * [ServingConfig.name][google.cloud.retail.v2beta.ServingConfig.name]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [ServingConfig][google.cloud.retail.v2beta.ServingConfig] to update. The
* following are NOT supported:
*
* * [ServingConfig.name][google.cloud.retail.v2beta.ServingConfig.name]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [ServingConfig][google.cloud.retail.v2beta.ServingConfig] to update. The
* following are NOT supported:
*
* * [ServingConfig.name][google.cloud.retail.v2beta.ServingConfig.name]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [ServingConfig][google.cloud.retail.v2beta.ServingConfig] to update. The
* following are NOT supported:
*
* * [ServingConfig.name][google.cloud.retail.v2beta.ServingConfig.name]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [ServingConfig][google.cloud.retail.v2beta.ServingConfig] to update. The
* following are NOT supported:
*
* * [ServingConfig.name][google.cloud.retail.v2beta.ServingConfig.name]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [ServingConfig][google.cloud.retail.v2beta.ServingConfig] to update. The
* following are NOT supported:
*
* * [ServingConfig.name][google.cloud.retail.v2beta.ServingConfig.name]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [ServingConfig][google.cloud.retail.v2beta.ServingConfig] to update. The
* following are NOT supported:
*
* * [ServingConfig.name][google.cloud.retail.v2beta.ServingConfig.name]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [ServingConfig][google.cloud.retail.v2beta.ServingConfig] to update. The
* following are NOT supported:
*
* * [ServingConfig.name][google.cloud.retail.v2beta.ServingConfig.name]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
    // Unknown-field handling: straight delegations to GeneratedMessageV3.Builder,
    // generated so the fluent return type is this concrete Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2beta.UpdateServingConfigRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.retail.v2beta.UpdateServingConfigRequest)
  // Shared immutable default instance; also serves as the "unset" sentinel for
  // message-typed fields referencing this type.
  private static final com.google.cloud.retail.v2beta.UpdateServingConfigRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.retail.v2beta.UpdateServingConfigRequest();
  }

  public static com.google.cloud.retail.v2beta.UpdateServingConfigRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser singleton. On failure, the partially built message is
  // attached to the thrown exception so callers can inspect what was parsed.
  private static final com.google.protobuf.Parser<UpdateServingConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateServingConfigRequest>() {
        @java.lang.Override
        public UpdateServingConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Re-throw with whatever was successfully parsed attached.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateServingConfigRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateServingConfigRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.retail.v2beta.UpdateServingConfigRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== End of generated file: UpdateServingConfigRequest.java (google-cloud-retail v2beta) ====
// ==== Begin generated file: googleapis/google-cloud-java —
// java-eventarc/proto-google-cloud-eventarc-v1/src/main/java/com/google/cloud/eventarc/v1/UpdateGoogleChannelConfigRequest.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/eventarc/v1/eventarc.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.eventarc.v1;
/**
 *
 *
 * <pre>
 * The request message for the UpdateGoogleChannelConfig method.
 * </pre>
 *
 * Protobuf type {@code google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest}
 */
public final class UpdateGoogleChannelConfigRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest)
    UpdateGoogleChannelConfigRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use UpdateGoogleChannelConfigRequest.newBuilder() to construct.
  // Instances are immutable; they are only created through the Builder.
  private UpdateGoogleChannelConfigRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor producing the all-fields-unset message (used for DEFAULT_INSTANCE).
  private UpdateGoogleChannelConfigRequest() {}

  // Reflective instantiation hook invoked by the protobuf runtime.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateGoogleChannelConfigRequest();
  }
  // Descriptor plumbing: ties this generated class to its entry in the
  // eventarc.proto descriptor and to the reflection-based field accessors.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.eventarc.v1.EventarcProto
        .internal_static_google_cloud_eventarc_v1_UpdateGoogleChannelConfigRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.eventarc.v1.EventarcProto
        .internal_static_google_cloud_eventarc_v1_UpdateGoogleChannelConfigRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest.class,
            com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest.Builder.class);
  }
private int bitField0_;
public static final int GOOGLE_CHANNEL_CONFIG_FIELD_NUMBER = 1;
private com.google.cloud.eventarc.v1.GoogleChannelConfig googleChannelConfig_;
/**
*
*
* <pre>
* Required. The config to be updated.
* </pre>
*
* <code>
* .google.cloud.eventarc.v1.GoogleChannelConfig google_channel_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the googleChannelConfig field is set.
*/
@java.lang.Override
public boolean hasGoogleChannelConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The config to be updated.
* </pre>
*
* <code>
* .google.cloud.eventarc.v1.GoogleChannelConfig google_channel_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The googleChannelConfig.
*/
@java.lang.Override
public com.google.cloud.eventarc.v1.GoogleChannelConfig getGoogleChannelConfig() {
return googleChannelConfig_ == null
? com.google.cloud.eventarc.v1.GoogleChannelConfig.getDefaultInstance()
: googleChannelConfig_;
}
/**
*
*
* <pre>
* Required. The config to be updated.
* </pre>
*
* <code>
* .google.cloud.eventarc.v1.GoogleChannelConfig google_channel_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.eventarc.v1.GoogleChannelConfigOrBuilder
getGoogleChannelConfigOrBuilder() {
return googleChannelConfig_ == null
? com.google.cloud.eventarc.v1.GoogleChannelConfig.getDefaultInstance()
: googleChannelConfig_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* The fields to be updated; only fields explicitly provided are updated.
* If no field mask is provided, all provided fields in the request are
* updated. To update all fields, provide a field mask of "*".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The fields to be updated; only fields explicitly provided are updated.
* If no field mask is provided, all provided fields in the request are
* updated. To update all fields, provide a field mask of "*".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* The fields to be updated; only fields explicitly provided are updated.
* If no field mask is provided, all provided fields in the request are
* updated. To update all fields, provide a field mask of "*".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getGoogleChannelConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getGoogleChannelConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest)) {
return super.equals(obj);
}
com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest other =
(com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest) obj;
if (hasGoogleChannelConfig() != other.hasGoogleChannelConfig()) return false;
if (hasGoogleChannelConfig()) {
if (!getGoogleChannelConfig().equals(other.getGoogleChannelConfig())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasGoogleChannelConfig()) {
hash = (37 * hash) + GOOGLE_CHANNEL_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getGoogleChannelConfig().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // Standard generated parse entry points. All overloads delegate to the
  // shared PARSER singleton (directly for in-memory data, or via the
  // GeneratedMessageV3 I/O helpers for stream sources).
  public static com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Builder pre-populated with the prototype's field values.
  public static Builder newBuilder(
      com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh Builder; any other message seeds the
    // Builder with its current field values.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  // Runtime hook: builder wired to a parent for nested-builder change tracking.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The request message for the UpdateGoogleChannelConfig method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest)
      com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequestOrBuilder {
    // Same descriptor/accessor-table plumbing as the message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.eventarc.v1.EventarcProto
          .internal_static_google_cloud_eventarc_v1_UpdateGoogleChannelConfigRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.eventarc.v1.EventarcProto
          .internal_static_google_cloud_eventarc_v1_UpdateGoogleChannelConfigRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest.class,
              com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest.Builder.class);
    }

    // Construct using com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly materializes nested field builders when the runtime requests it
    // (alwaysUseFieldBuilders); otherwise they are created lazily on demand.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getGoogleChannelConfigFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    // Resets every field to unset and disposes any nested field builders.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      googleChannelConfig_ = null;
      if (googleChannelConfigBuilder_ != null) {
        googleChannelConfigBuilder_.dispose();
        googleChannelConfigBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.eventarc.v1.EventarcProto
          .internal_static_google_cloud_eventarc_v1_UpdateGoogleChannelConfigRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest
        getDefaultInstanceForType() {
      return com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest.getDefaultInstance();
    }
@java.lang.Override
public com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest build() {
com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest buildPartial() {
com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest result =
new com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.googleChannelConfig_ =
googleChannelConfigBuilder_ == null
? googleChannelConfig_
: googleChannelConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
    // Generic reflection-based mutators: plain delegations to the superclass,
    // regenerated here so each returns the concrete Builder type.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dispatches to the typed mergeFrom when the argument is this message type;
    // otherwise falls back to the reflection-based merge.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest) {
        return mergeFrom((com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
public Builder mergeFrom(com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest other) {
if (other
== com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest.getDefaultInstance())
return this;
if (other.hasGoogleChannelConfig()) {
mergeGoogleChannelConfig(other.getGoogleChannelConfig());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Wire-format parse loop: reads tags until end-of-stream (tag 0) or an
    // end-group tag, routing each known field into its nested builder and
    // everything else into unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (google_channel_config), wire type 2.
                input.readMessage(
                    getGoogleChannelConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2 (update_mask), wire type 2.
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure: partial state may have been written.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private com.google.cloud.eventarc.v1.GoogleChannelConfig googleChannelConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.eventarc.v1.GoogleChannelConfig,
com.google.cloud.eventarc.v1.GoogleChannelConfig.Builder,
com.google.cloud.eventarc.v1.GoogleChannelConfigOrBuilder>
googleChannelConfigBuilder_;
/**
*
*
* <pre>
* Required. The config to be updated.
* </pre>
*
* <code>
* .google.cloud.eventarc.v1.GoogleChannelConfig google_channel_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the googleChannelConfig field is set.
*/
public boolean hasGoogleChannelConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The config to be updated.
* </pre>
*
* <code>
* .google.cloud.eventarc.v1.GoogleChannelConfig google_channel_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The googleChannelConfig.
*/
public com.google.cloud.eventarc.v1.GoogleChannelConfig getGoogleChannelConfig() {
if (googleChannelConfigBuilder_ == null) {
return googleChannelConfig_ == null
? com.google.cloud.eventarc.v1.GoogleChannelConfig.getDefaultInstance()
: googleChannelConfig_;
} else {
return googleChannelConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The config to be updated.
* </pre>
*
* <code>
* .google.cloud.eventarc.v1.GoogleChannelConfig google_channel_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGoogleChannelConfig(com.google.cloud.eventarc.v1.GoogleChannelConfig value) {
if (googleChannelConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
googleChannelConfig_ = value;
} else {
googleChannelConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The config to be updated.
* </pre>
*
* <code>
* .google.cloud.eventarc.v1.GoogleChannelConfig google_channel_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGoogleChannelConfig(
com.google.cloud.eventarc.v1.GoogleChannelConfig.Builder builderForValue) {
if (googleChannelConfigBuilder_ == null) {
googleChannelConfig_ = builderForValue.build();
} else {
googleChannelConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The config to be updated.
* </pre>
*
* <code>
* .google.cloud.eventarc.v1.GoogleChannelConfig google_channel_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeGoogleChannelConfig(
com.google.cloud.eventarc.v1.GoogleChannelConfig value) {
if (googleChannelConfigBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& googleChannelConfig_ != null
&& googleChannelConfig_
!= com.google.cloud.eventarc.v1.GoogleChannelConfig.getDefaultInstance()) {
getGoogleChannelConfigBuilder().mergeFrom(value);
} else {
googleChannelConfig_ = value;
}
} else {
googleChannelConfigBuilder_.mergeFrom(value);
}
if (googleChannelConfig_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The config to be updated.
* </pre>
*
* <code>
* .google.cloud.eventarc.v1.GoogleChannelConfig google_channel_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearGoogleChannelConfig() {
bitField0_ = (bitField0_ & ~0x00000001);
googleChannelConfig_ = null;
if (googleChannelConfigBuilder_ != null) {
googleChannelConfigBuilder_.dispose();
googleChannelConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The config to be updated.
* </pre>
*
* <code>
* .google.cloud.eventarc.v1.GoogleChannelConfig google_channel_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.eventarc.v1.GoogleChannelConfig.Builder
getGoogleChannelConfigBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getGoogleChannelConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The config to be updated.
* </pre>
*
* <code>
* .google.cloud.eventarc.v1.GoogleChannelConfig google_channel_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.eventarc.v1.GoogleChannelConfigOrBuilder
getGoogleChannelConfigOrBuilder() {
if (googleChannelConfigBuilder_ != null) {
return googleChannelConfigBuilder_.getMessageOrBuilder();
} else {
return googleChannelConfig_ == null
? com.google.cloud.eventarc.v1.GoogleChannelConfig.getDefaultInstance()
: googleChannelConfig_;
}
}
/**
*
*
* <pre>
* Required. The config to be updated.
* </pre>
*
* <code>
* .google.cloud.eventarc.v1.GoogleChannelConfig google_channel_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.eventarc.v1.GoogleChannelConfig,
com.google.cloud.eventarc.v1.GoogleChannelConfig.Builder,
com.google.cloud.eventarc.v1.GoogleChannelConfigOrBuilder>
getGoogleChannelConfigFieldBuilder() {
if (googleChannelConfigBuilder_ == null) {
googleChannelConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.eventarc.v1.GoogleChannelConfig,
com.google.cloud.eventarc.v1.GoogleChannelConfig.Builder,
com.google.cloud.eventarc.v1.GoogleChannelConfigOrBuilder>(
getGoogleChannelConfig(), getParentForChildren(), isClean());
googleChannelConfig_ = null;
}
return googleChannelConfigBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* The fields to be updated; only fields explicitly provided are updated.
* If no field mask is provided, all provided fields in the request are
* updated. To update all fields, provide a field mask of "*".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The fields to be updated; only fields explicitly provided are updated.
* If no field mask is provided, all provided fields in the request are
* updated. To update all fields, provide a field mask of "*".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The fields to be updated; only fields explicitly provided are updated.
* If no field mask is provided, all provided fields in the request are
* updated. To update all fields, provide a field mask of "*".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The fields to be updated; only fields explicitly provided are updated.
* If no field mask is provided, all provided fields in the request are
* updated. To update all fields, provide a field mask of "*".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The fields to be updated; only fields explicitly provided are updated.
* If no field mask is provided, all provided fields in the request are
* updated. To update all fields, provide a field mask of "*".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The fields to be updated; only fields explicitly provided are updated.
* If no field mask is provided, all provided fields in the request are
* updated. To update all fields, provide a field mask of "*".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The fields to be updated; only fields explicitly provided are updated.
* If no field mask is provided, all provided fields in the request are
* updated. To update all fields, provide a field mask of "*".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * The fields to be updated; only fields explicitly provided are updated.
 * If no field mask is provided, all provided fields in the request are
 * updated. To update all fields, provide a field mask of "*".
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2;</code>
 */
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  // Prefer the live nested builder; otherwise fall back to the stored
  // message, substituting the default instance when the field is unset.
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessageOrBuilder();
  } else {
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  }
}
/**
 *
 *
 * <pre>
 * The fields to be updated; only fields explicitly provided are updated.
 * If no field mask is provided, all provided fields in the request are
 * updated. To update all fields, provide a field mask of "*".
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    getUpdateMaskFieldBuilder() {
  // Lazily create the nested builder; once it exists it owns the field
  // value, so the plain message reference is cleared.
  if (updateMaskBuilder_ == null) {
    updateMaskBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>(
            getUpdateMask(), getParentForChildren(), isClean());
    updateMask_ = null;
  }
  return updateMaskBuilder_;
}
// Delegates unknown-field handling to the generated message base class.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
// Delegates unknown-field merging to the generated message base class.
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest)
// Shared immutable default instance for this message type, created once at
// class-initialization time.
private static final com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest();
}
/** Returns the singleton default (all-fields-unset) instance. */
public static com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
/**
 * Wire-format parser. Parsing delegates to {@code Builder#mergeFrom}; on any
 * failure the partially built message is attached to the thrown
 * InvalidProtocolBufferException via {@code setUnfinishedMessage}.
 */
private static final com.google.protobuf.Parser<UpdateGoogleChannelConfigRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateGoogleChannelConfigRequest>() {
      @java.lang.Override
      public UpdateGoogleChannelConfigRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          // Normalize to InvalidProtocolBufferException so callers see a
          // single exception type from parsing.
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Returns the shared parser for this message type. */
public static com.google.protobuf.Parser<UpdateGoogleChannelConfigRequest> parser() {
  return PARSER;
}
/** Returns the shared parser (instance-method accessor required by the API). */
@java.lang.Override
public com.google.protobuf.Parser<UpdateGoogleChannelConfigRequest> getParserForType() {
  return PARSER;
}
/** Returns the singleton default instance (instance-method accessor). */
@java.lang.Override
public com.google.cloud.eventarc.v1.UpdateGoogleChannelConfigRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
oracle/nosql | 35,935 | kvtest/kvstore-IT/src/main/java/oracle/kv/impl/util/recovery/SNRecoverTest.java | /*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2011, 2025 Oracle and/or its affiliates. All rights reserved.
*
*/
package oracle.kv.impl.util.recovery;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.nio.file.Files;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.TimeZone;
import oracle.kv.TestBase;
import oracle.kv.impl.admin.CommandServiceAPI;
import oracle.kv.impl.admin.param.Parameters;
import oracle.kv.impl.admin.param.StorageNodeParams;
import oracle.kv.impl.param.ParameterMap;
import oracle.kv.impl.param.ParameterState;
import oracle.kv.impl.topo.AdminId;
import oracle.kv.impl.topo.StorageNodeId;
import oracle.kv.impl.util.FileUtils;
import oracle.kv.impl.util.registry.RegistryUtils;
import oracle.kv.impl.util.server.LoggerUtils;
import oracle.kv.util.CreateStore;
import oracle.kv.util.TestUtils;
import oracle.kv.util.recovery.ARTRequiredFiles;
import oracle.kv.util.recovery.AdminRecover;
import oracle.kv.util.recovery.RecoverConfig;
import oracle.kv.util.recovery.SNRecover;
import com.sleepycat.je.BackupFSArchiveCopy;
import com.sleepycat.je.dbi.BackupManager;
import com.sleepycat.je.utilint.CronScheduleParser;
import org.junit.Test;
/**
* SNRecover Utility Test
*/
public class SNRecoverTest extends TestBase {
    /* Name of the store created for every scenario in this test class. */
    private final static String STORE_NAME = "SNRecoverTest";
    /* 1x3 store under test; shut down in tearDown(). */
    private CreateStore createStore = null;
    /* Temp recovery/backup property files and directories; each is cleaned
     * up in tearDown() when non-null. */
    private File inputConfigFile = null;
    private File backupConfigFile = null;
    private File backupDirectory= null;
    private File outputAdminDir = null;
    /* Recovery copy class used for successful runs. */
    private final static String jeRecoverCopyClass =
        "com.sleepycat.je.RecoverArchiveFSCopy";
    /* Copy class that always fails, used to exercise the error path. */
    private final static String failingRecoverCopyClass =
        "oracle.kv.impl.util.recovery.FailingObjectStorageCopy";
    private static final long HOUR_MILLIS = 60 * 60 * 1000;

    /**
     * Date formatter to print dates in human readable format. Synchronize on
     * this object when using it (SimpleDateFormat is not thread-safe).
     */
    private static final SimpleDateFormat dateFormat =
        new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSSZ");

    static {
        dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
    }
@Override
public void setUp()
throws Exception {
super.setUp();
/*
* Delete temporary created files from previous runs.
*/
final File ansZipFile = new File("/tmp/ans.zip");
if (ansZipFile.exists()) {
Files.delete(ansZipFile.toPath());
}
final File recoverConfigZipFile = new File("/tmp/recoverconfig.zip");
if (recoverConfigZipFile.exists()) {
Files.delete(recoverConfigZipFile.toPath());
}
final File topologyOutputJSON = new File("/tmp/topologyoutput.json");
if (topologyOutputJSON.exists()) {
Files.delete(topologyOutputJSON.toPath());
}
final File artrequiredfiles = new File("/tmp/artrequiredfiles");
if (artrequiredfiles.exists()) {
FileUtils.deleteDirectory(artrequiredfiles);
}
final File recoverConfig = new File("/tmp/recoverconfig");
if (recoverConfig.exists()) {
FileUtils.deleteDirectory(recoverConfig);
}
final File copyBackupDir = new File("/tmp/copybackup");
if (copyBackupDir.exists()) {
FileUtils.deleteDirectory(copyBackupDir);
}
RegistryUtils.clearRegistryCSF();
}
    /**
     * Shuts down the store and deletes every temp file/directory created by
     * the test. Each resource is guarded by a null check because individual
     * tests create different subsets of them.
     */
    @Override
    public void tearDown()
        throws Exception {

        super.tearDown();
        if (createStore != null) {
            createStore.shutdown();
        }
        if (inputConfigFile != null) {
            Files.delete(inputConfigFile.toPath());
        }
        if (backupConfigFile != null) {
            Files.delete(backupConfigFile.toPath());
        }
        if (backupDirectory != null) {
            // Remove the whole /tmp/backup tree, not just the store subdir.
            FileUtils.deleteDirectory(
                new File(backupDirectory.getParent()));
        }
        if (outputAdminDir != null) {
            FileUtils.deleteDirectory(outputAdminDir);
        }
        LoggerUtils.closeAllHandlers();
    }
/*
* All of the tests are divided into following categories :
*
* [] Bad -config, -requiredfile, topologyfile and -hostname arguments,
* input config file, requiredfile, topology file existence check.
* [] Incorrect/Unsupported value passed in input config file. We are
* supporting baseArchivePath and recoveryCopyClass in config file.
* [] Generate requiredfiles.json and topologyoutput.json file after
* running ARTRequiredFiles and RecoverConfig on a 1x3 store having
* admin shard with scheduled backup enabled. With requiredfiles.json,
* topologyoutput.json and hostname as input argument, copy winner
* replication and admin node jdb files in storage directory location.
*/
    /**
     * Bad Arguments SNRecover tests: drives every argument-validation helper
     * in sequence. Order matters — some helpers create or delete files in
     * /tmp that later helpers rely upon.
     * @throws Exception
     */
    @Test
    public void testBadArguments() throws Exception {

        testSNRecoverEmptyArgument();
        testSNRecoverNoConfigFlag();
        testSNRecoverNoRequiredFileFlag();
        testSNRecoverNoTopologyFileFlag();
        testSNRecoverNoHostNameFlag();
        testSNRecoverNoConfigArgument();
        testSNRecoverNoRequiredFileArgument();
        testSNRecoverNoTopologyFileArgument();
        testSNRecoverNoHostNameArgument();
        testSNRecoverEmptyConfigArgument();
        testSNRecoverEmptyRequiredFileArgument();
        testSNRecoverEmptyTopologyFileArgument();
        testSNRecoverEmptyHostNameArgument();
        testSNRecoverInvalidConfigPathArgument();
        testSNRecoverInvalidRequiredFileArgument();
        testSNRecoverInvalidTopologyFileArgument();
        testSNRecoverWrongArgument();
        testSNRecoverConfigFileNotExist();
        testSNRecoverRequiredFileNotExist();
        testSNRecoverTopologyFileNotExist();
    }
private void testSNRecoverEmptyArgument() throws Exception {
final String[] argv = {};
final String message = "Empty argument list";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverNoConfigFlag() throws Exception {
final String[] argv = {"-requiredfile", "/tmp/requiredfile.json",
"-topologyfile", "/tmp/topologyoutput.json",
"-hostname", "localhost"};
final String message = "-config flag argument not specified";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverNoRequiredFileFlag() throws Exception {
final String[] argv = {"-config", "/tmp/recoverproperties",
"-topologyfile", "/tmp/topologyoutput.json",
"-hostname", "localhost"};
final String message = "-requiredfile flag argument not specified";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverNoTopologyFileFlag() throws Exception {
final String[] argv = {"-config", "/tmp/recoverproperties",
"-requiredfile", "/tmp/requiredfile.json",
"-hostname", "localhost"};
final String message = "-topologyfile flag argument not specified";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverNoHostNameFlag() throws Exception {
final String[] argv = {"-config", "/tmp/recoverproperties",
"-requiredfile", "/tmp/requiredfile.json",
"-topologyfile", "/tmp/topologyoutput.json"};
final String message = "-hostname argument not specified";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverNoConfigArgument() throws Exception {
final String[] argv = {"-config"};
final String message = "-config requires an argument";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverNoRequiredFileArgument() throws Exception {
final String[] argv = {"-requiredfile"};
final String message = "-requiredfile requires an argument";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverNoTopologyFileArgument() throws Exception {
final String[] argv = {"-topologyfile"};
final String message = "-topologyfile requires an argument";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverNoHostNameArgument() throws Exception {
final String[] argv = {"-config", "/tmp/recoverproperties",
"-requiredfile", "/tmp/requiredfile.json",
"-topologyfile", "/tmp/topologyoutput.json",
"-hostname"};
final String message = "-hostname requires an argument";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverEmptyConfigArgument() throws Exception {
final String[] argv = {"-config", ""};
final String message =
"Config file path name must not be empty";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverEmptyRequiredFileArgument() throws Exception {
final String[] argv = {"-config", "/tmp/recoverproperties",
"-requiredfile", ""};
final String message =
"RequiredFile path must not be empty";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverEmptyTopologyFileArgument()
throws Exception {
final String[] argv = {"-config", "/tmp/recoverproperties",
"-topologyfile", ""};
final String message =
"topology output json file path must not be empty";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverEmptyHostNameArgument() throws Exception {
final String[] argv =
{"-config", "/tmp/recoverproperties",
"-hostname", ""};
final String message = "hostname of the storage node not be empty";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverInvalidConfigPathArgument()
throws Exception {
final String[] argv = {"-config", "tmp",
"-requiredfile", "/tmp/requiredfile.json"};
final String message =
"Config file path name must be an absolute path";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverInvalidRequiredFileArgument()
throws Exception {
final String[] argv = {"-requiredfile", "tmp"};
final String message =
"RequiredFile path must be an absolute path";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverInvalidTopologyFileArgument()
throws Exception {
final String[] argv = {"-topologyfile", "tmp"};
final String message =
"topology output json file path must be an absolute path";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverWrongArgument() throws Exception {
final String[] argv = {"-abc"};
final String message = "-abc is not a supported option.";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverConfigFileNotExist() throws Exception {
final String[] argv = {"-config", "/tmp/recoverproperties",
"-requiredfile", "/tmp/requiredfile.json",
"-topologyfile", "/tmp/topologyoutput.json",
"-hostname", "localhost"};
final File inputDir = new File("/tmp/recoverproperties");
if (inputDir.exists()) {
FileUtils.deleteDirectory(inputDir);
}
final String message = "Specified input config file " +
inputDir.getAbsolutePath() +
" does not exist";
validateSNRecoverOutput(argv, message);
}
private void testSNRecoverRequiredFileNotExist() throws Exception {
final String[] argv = {"-config", "/tmp/recoverproperties",
"-requiredfile", "/tmp/requiredfile.json",
"-topologyfile", "/tmp/topologyoutput.json",
"-hostname", "localhost"};
final File inputConfigDir = new File("/tmp/recoverproperties");
if (!inputConfigDir.exists()) {
RecoverConfig.makeDir(inputConfigDir);
}
final File inputDir = new File("/tmp/requiredfile.json");
if (inputDir.exists()) {
assertTrue(FileUtils.deleteDirectory(inputDir));
}
final String message = "Specified requiredfile json file " +
inputDir.getAbsolutePath() +
" does not exist";
validateSNRecoverOutput(argv, message);
Files.delete(inputConfigDir.toPath());
}
private void testSNRecoverTopologyFileNotExist() throws Exception {
final String[] argv = {"-config", "/tmp/recoverproperties",
"-requiredfile", "/tmp/requiredfile.json",
"-topologyfile", "/tmp/topologyoutput.json",
"-hostname", "localhost"};
final File inputConfigDir = new File("/tmp/recoverproperties");
if (!inputConfigDir.exists()) {
Files.createFile(inputConfigDir.toPath());
}
final File inputRequiredDir = new File("/tmp/requiredfile.json");
if (!inputRequiredDir.exists()) {
Files.createFile(inputRequiredDir.toPath());
}
final File inputDir = new File("/tmp/topologyoutput.json");
if (inputDir.exists()) {
assertTrue(FileUtils.deleteDirectory(inputDir));
}
final String message = "Specified topologyoutput json file " +
inputDir.getAbsolutePath() +
" does not exist";
validateSNRecoverOutput(argv, message);
Files.delete(inputConfigDir.toPath());
Files.delete(inputRequiredDir.toPath());
}
private void validateSNRecoverOutput(String[] argument,
String message) {
ByteArrayOutputStream output = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(output);
PrintStream originalSysErr = System.err;
System.setErr(ps);
try {
assertFalse(SNRecover.mainInternal(argument));
assertTrue(output.toString().contains(message));
} finally {
System.setErr(originalSysErr);
}
}
    /**
     * Invalid and Unsupported arguments in input config file tests.
     * @throws Exception
     */
    @Test
    public void testInvalidInputConfigFileTests() throws Exception {

        /*
         * Currently in kv internal unit testing we will test recoverCopyClass
         * parameter in input config file.
         *
         * Object Storage specific parameters will be tested as part of
         * RecoverObjectStorageCopyTest in spartakv branch.
         *
         * TODO : Test needs to be updated as we support more parameters
         * in input config file.
         */
        testEmptyConfigFileCheck();
    }
private void testEmptyConfigFileCheck() throws IOException {
Properties props = new Properties();
checkInvalidConfigFileProperties(props);
}
private void checkInvalidConfigFileProperties(Properties props)
throws IOException {
inputConfigFile = File.createTempFile("config", ".properties");
final File requiredfile = new File("/tmp/requiredfile.json");
requiredfile.createNewFile();
final File topologyfile = new File("/tmp/topologyoutput.json");
topologyfile.createNewFile();
writeProps(props);
final String [] argv = {"-config", inputConfigFile.toString(),
"-requiredfile", "/tmp/requiredfile.json",
"-topologyfile", "/tmp/topologyoutput.json",
"-hostname", "localhost"};
SNRecover snrecover = new SNRecover();
TestUtils.checkException(
() -> snrecover.parseArgs(argv),
IllegalArgumentException.class);
Files.delete(requiredfile.toPath());
}
    /**
     * Running SNRecover test. Check for running of SNRecover utility
     * and copy of winner admin and replication node jdb files at storage
     * directory.
     * Also running SNRecover failure scenario.
     * @throws Exception
     */
    @Test
    public void testSNRecoverTest() throws Exception {

        /*
         * --Create a 1x3 kvstore with admin shard
         * --Enable scheduled backups with time multiplier set
         * --Run ARTRequiredFiles Utility with given target
         *   recovery time and base directory
         * --Check for generation of art.json and
         *   <kvstore>_requiredfiles.json under target
         *   zip file.
         * --Run AdminRecover to copy the winner node admin
         *   jdb files identified in requiredfile.json.
         * --Run RecoverConfig on copied admin jdb file to
         *   generate the topologyoutput.json
         * --With requirefiles.json and topologyoutput.json, copy
         *   the jdb files for replication and admin node at
         *   respective storage directory.
         */

        /* Start a 1x3 store with admin shard */
        createStore = new CreateStore(STORE_NAME,
                                      5240, // random port to start at
                                      3, // numSNs
                                      3, // RF
                                      3, // numPartitions
                                      1, // capacity
                                      CreateStore.MB_PER_SN,
                                      true, /* useThreads */
                                      null,
                                      true,
                                      SECURITY_ENABLE);
        createStore.start();
        final CommandServiceAPI cs = createStore.getAdmin();
        final Parameters params = cs.getParameters();

        /*
         * Create je.backup.copyConfig properties file
         */
        backupConfigFile =
            File.createTempFile("backupconfig", ".properties");
        Properties copyProps = new Properties();
        copyProps.setProperty(BackupFSArchiveCopy.CHECKSUM_KEY, "SHA1");
        try (OutputStream out = new FileOutputStream(backupConfigFile)) {
            copyProps.store(out, "Copy config file");
        }

        /*
         * Create je.backup.locationConfig base directory
         */
        backupDirectory =
            new File("/tmp/backup/" + cs.getStoreName());
        RecoverConfig.makeDir(backupDirectory);

        /*
         * Setting up the schedule for backups. We are following
         * same method as done in BackupManagerTest.testBasics()
         */
        final String schedule = "0 * * * *";
        // timeMultiplier compresses one real hour of schedule into ~2 seconds.
        BackupManager.timeMultiplier = HOUR_MILLIS / 2000;
        CronScheduleParser parser =
            BackupManager.createSnapshotScheduleParser(schedule);

        /*
         * Re-starts all RNs and Admins with configProperties
         */
        final String jeParameterValue =
            "je.env.runBackup true; je.backup.schedule " + schedule + "; "
            + "je.backup.copyClass com.sleepycat.je.BackupFSArchiveCopy; "
            + "je.backup.copyConfig " + backupConfigFile.toString() + "; "
            + "je.backup.locationClass "
            + "com.sleepycat.je.BackupFileLocation; "
            + "je.backup.locationConfig "
            + backupDirectory.getAbsolutePath();
        ParameterMap map = new ParameterMap();
        map.setParameter(ParameterState.JE_MISC, jeParameterValue);

        final Set<AdminId> adminList = params.getAdminIds();
        /*
         * Change params for admin sequentially rather than using
         * createChangeAllAdminsPlan i.e --all-admins
         */
        int p;
        for (AdminId admin : adminList) {
            if (admin.toString().equals("admin1")) {
                /*
                 * We are setting backup specific parameters on
                 * two admins out of three in admin shard.
                 *
                 * Setting and restarting Master admin leading to
                 * some issues. In TODO List.
                 */
                continue;
            }
            p = cs.createChangeParamsPlan("changeAdminParams", admin, map);
            cs.approvePlan(p);
            cs.executePlan(p, false);
            cs.awaitPlan(p, 0, null);
            cs.assertSuccess(p);
        }

        /*
         * change params for RNs in one go using createChangeAllParamsPlan
         * i.e. --all-rns
         */
        p = cs.createChangeAllParamsPlan("changeAllParams", null, map);
        cs.approvePlan(p);
        cs.executePlan(p, false);
        cs.awaitPlan(p, 0, null);
        cs.assertSuccess(p);

        /*
         * Wait until 10 ms (real time) after the start of the next
         * backup
         */
        BackupManager.sleepMs(
            parser.getDelayTime() + 10*BackupManager.timeMultiplier);

        /*
         * At this point with restart of the nodes i.e admins and RNs
         * scheduled backups have been started in backup Directory.
         * We need to arrive at a target recovery time so that zip
         * file can be generated.
         *
         * Wait until 90% of the way into the tenth backup period.
         * Tenth backup period has been selected on conservative
         * basis so that we can get positive sleep duration.
         */
        long firstBackup = parser.getTime() + parser.getDelayTime();
        long sleep = (long) (10.90 * parser.getInterval()) -
            /* Subtract time since start of first backup */
            (BackupManager.currentTimeMs() - firstBackup);
        BackupManager.sleepMs(sleep);

        /*
         * We can have current time as target recovery time because there
         * will be at least 9-10 backups taken by that time. So we should
         * be able to derive at an ART from current time.
         */
        final long currentTime = BackupManager.currentTimeMs();
        final String timeString = formatTime(currentTime);
        // Build the yyMMddHH target-recovery-time token from the formatted
        // "yyyy-MM-dd HH:..." string, anchored at its first '-'.
        int idx = timeString.indexOf('-');
        final String targetRecoveryTime =
            timeString.substring(idx - 2, idx) + /* year (last two digits) */
            timeString.substring(idx + 1, idx + 3) + /* month */
            timeString.substring(idx + 4, idx + 6) + /* date */
            timeString.substring(idx + 7, idx + 9); /* hour */

        /*
         * Create Recovery input config file with right parameters
         */
        Properties props = new Properties();
        props.setProperty(ARTRequiredFiles.BASEDIR_KEY, "/tmp/copybackup");
        props.setProperty(ARTRequiredFiles.RECOVERY_COPY_CLASS_KEY,
                          jeRecoverCopyClass);
        inputConfigFile = File.createTempFile("recoverconfig", ".properties");
        writeProps(props);

        /*
         * Before running adminrecover, we need to change the directory
         * format of the backups because JE backups directory format is
         * different from cloud backups directory. Recovery enhancements
         * are for the cloud backups directory format.
         *
         * JE : for admin1 : /tmp/backup/SNRecoverTest/1/...
         *      for rg1-rn1 : /tmp/backup/SNRecoverTest/rg1-rn1/....
         *
         * Cloud : for admin1 : /tmp/backup/SNRecoverTest/admin1
         *         for rg1-rn1 : /tmp/backup/SNRecoverTest/rg1/rn1/...
         */
        File currentAdmin2 = new File("/tmp/backup/SNRecoverTest/2");
        File admin2 = new File("/tmp/copybackup/SNRecoverTest/admin2");
        RecoverConfig.makeDir(admin2);
        File currentAdmin3 = new File("/tmp/backup/SNRecoverTest/3");
        File admin3 = new File("/tmp/copybackup/SNRecoverTest/admin3");
        RecoverConfig.makeDir(admin3);
        File currentrg1rn1 = new File("/tmp/backup/SNRecoverTest/rg1-rn1");
        File rg1_rn1 = new File("/tmp/copybackup/SNRecoverTest/rg1/rn1");
        RecoverConfig.makeDir(rg1_rn1);
        File currentrg1rn2 = new File("/tmp/backup/SNRecoverTest/rg1-rn2");
        File rg1_rn2 = new File("/tmp/copybackup/SNRecoverTest/rg1/rn2");
        RecoverConfig.makeDir(rg1_rn2);
        File currentrg1rn3 = new File("/tmp/backup/SNRecoverTest/rg1-rn3");
        File rg1_rn3 = new File("/tmp/copybackup/SNRecoverTest/rg1/rn3");
        RecoverConfig.makeDir(rg1_rn3);

        FileUtils.copyDir(currentAdmin2, admin2);
        FileUtils.copyDir(currentAdmin3, admin3);
        FileUtils.copyDir(currentrg1rn1, rg1_rn1);
        FileUtils.copyDir(currentrg1rn2, rg1_rn2);
        FileUtils.copyDir(currentrg1rn3, rg1_rn3);

        final String [] argv = {"-targetRecoveryTime", targetRecoveryTime,
                                "-config", inputConfigFile.toString(),
                                "-target", "/tmp/ans.zip"};
        assertTrue(ARTRequiredFiles.mainInternal(argv));

        /*
         * ans.zip having artrequiredfiles directory has been generated.
         * Now unzip the ans.zip file.
         */
        final File outputDir = new File("/tmp");
        checkOutputDirectory(outputDir, "artrequiredfiles");

        /*
         * ans.zip has been generated. Now we will run adminrecover with
         * following parameters so that winner node admin jdb file has
         * been copied in the target path.
         *
         * -config <configuration file path> : inputConfigFile
         * -requiredfiles <artrequiredfiles directory path> :
         *    /tmp/artrequiredfiles
         * -target <output directory path> : /tmp/adminjdbfiles
         */
        outputAdminDir = new File("/tmp/adminjdbfiles");
        RecoverConfig.makeDir(outputAdminDir);

        Properties adminRecoverProps = new Properties();
        adminRecoverProps.setProperty(ARTRequiredFiles.BASEDIR_KEY,
                                      "/tmp/copybackup");
        adminRecoverProps.setProperty(ARTRequiredFiles.RECOVERY_COPY_CLASS_KEY,
                                      jeRecoverCopyClass);
        Files.delete(inputConfigFile.toPath());
        inputConfigFile =
            File.createTempFile("adminrecoverconfig", ".properties");
        writeProps(adminRecoverProps);

        final String [] adminrecover =
            {"-config", inputConfigFile.toString(),
             "-requiredfiles", "/tmp/artrequiredfiles",
             "-target", outputAdminDir.toString()};
        assertTrue(AdminRecover.mainInternal(adminrecover));

        /*
         * Output directory format will be /tmp/adminjdbfiles/<adminX>/.
         * In a 1x3 store, there will be single admin. adminDirCompletePath
         * will be the input parameter for RecoverConfig enhancement
         */
        String adminDirCompletePath = checkAdminJDBDirectory();

        /*
         * At this point we have admin jdb files copied from archive. Now
         * we will run RecoverConfig on admin jdb files and generate
         * topology output json file.
         *
         * -target path will be /tmp/recoverconfig.zip
         */
        String outputRecoverConfigZip = "/tmp/recoverconfig.zip";

        /*
         * Run recoverconfig utility on adminDirCompletePath and
         * check creation of topologyJSON output in config.zip file
         */
        String[] recoverConfigArgv = {"-input", adminDirCompletePath,
                                      "-target", outputRecoverConfigZip};
        String message = "Configuration information recovered "
                         + "successfully at " + outputRecoverConfigZip;
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        PrintStream ps = new PrintStream(output);
        // NOTE(review): System.out is redirected here and never restored in
        // this method — consider saving and restoring it like System.err
        // below.
        System.setOut(ps);
        assertTrue(RecoverConfig.main1(recoverConfigArgv));
        assertTrue(output.toString().contains(message));

        /*
         * recoverconfig.zip having top level directory as recoverconfig
         * has been generated. Now unzip the recoverconfig.zip
         */
        final File recoverConfigOutputDir = new File("/tmp");
        checkOutputDirectory(recoverConfigOutputDir, "recoverconfig");

        /*
         * Before running SNRecover, delete current files in storage
         * directory of admin and replication nodes. Then files will be
         * recreated by SNRecover enhancement. We will check for existence
         * of the files under newly created storage directory after SNRecover
         * run.
         */
        List<StorageNodeId> snIds =
            cs.getTopology().getSortedStorageNodeIds();
        createStore.shutdown();
        for (StorageNodeId snId : snIds) {
            StorageNodeParams snp = params.get(snId);
            final File adminEnvPath = new File(snp.getRootDirPath() +
                                               "/" + STORE_NAME +
                                               "/" + snId.getFullName() + "/" +
                                               "admin" +
                                               String.valueOf(
                                                   snId.getStorageNodeId()) +
                                               "/" + "env" + "/");
            FileUtils.deleteDirectory(adminEnvPath);
            final File replicationNodeEnvPath =
                new File(snp.getRootDirPath() +
                         "/" + STORE_NAME +
                         "/" + snId.getFullName() +
                         "/" + "rg1-rn" +
                         String.valueOf(snId.getStorageNodeId()) +
                         "/" + "env" + "/");
            FileUtils.deleteDirectory(replicationNodeEnvPath);
        }

        /*
         * Now we will run SNRecover enhancement. We have generated
         * requiredfiles.json and topologyoutput.json file.
         */
        final String [] snrecover =
            {"-config", inputConfigFile.toString(),
             "-requiredfile",
             "/tmp/artrequiredfiles/" + STORE_NAME + "_requiredfiles.json",
             "-topologyfile", "/tmp/recoverconfig/topologyoutput.json",
             "-hostname", "localhost", "-debug"};
        assertTrue(SNRecover.mainInternal(snrecover));

        /*
         * After running SNRecover, check files in storage directory of admin
         * and replication nodes. There should be one replication node and
         * one admin node (which are winner nodes in respective shard) with
         * jdb files copied from backup archive.
         */
        int numNodeFilesCopied = 0;
        for (StorageNodeId snId : snIds) {
            StorageNodeParams snp = params.get(snId);
            final File adminEnvPath = new File(snp.getRootDirPath() +
                                               "/" + STORE_NAME +
                                               "/" + snId.getFullName() + "/" +
                                               "admin" +
                                               String.valueOf(
                                                   snId.getStorageNodeId()) +
                                               "/" + "env" + "/");
            if (adminEnvPath.listFiles().length > 0) {
                numNodeFilesCopied++;
            }
            final File replicationNodeEnvPath =
                new File(snp.getRootDirPath() +
                         "/" + STORE_NAME +
                         "/" + snId.getFullName() +
                         "/" + "rg1-rn" +
                         String.valueOf(
                             snId.getStorageNodeId()) +
                         "/" + "env" + "/");
            if (replicationNodeEnvPath.listFiles().length > 0) {
                numNodeFilesCopied++;
            }
        }
        // At least one winner admin plus one winner RN must have been
        // restored.
        assertTrue(numNodeFilesCopied >= 2);

        /*
         * Now we are setting up environment to inject IllegalArgumentException
         * failure using MockRecoverObjectStorageCopy and validate the failure
         * output in SNRecover.
         */
        PrintStream originalSysErr = System.err;
        System.setErr(ps);
        adminRecoverProps.setProperty(ARTRequiredFiles.RECOVERY_COPY_CLASS_KEY,
                                      failingRecoverCopyClass);
        Files.delete(inputConfigFile.toPath());
        inputConfigFile = File.createTempFile("adminrecoverconfig", ".properties");
        writeProps(adminRecoverProps);

        /*
         * SNRecover should fail because of the error that will
         * happen when the environment files are copied in.
         * - the call to mainInternal should return false.
         * - the utility should print an error message showing
         *   the problem.
         */
        final String [] snrecoverFailure =
            {"-config", inputConfigFile.toString(),
             "-requiredfile",
             "/tmp/artrequiredfiles/" + STORE_NAME + "_requiredfiles.json",
             "-topologyfile", "/tmp/recoverconfig/topologyoutput.json",
             "-hostname", "localhost", "-debug"};
        try {
            assertFalse(SNRecover.mainInternal(snrecoverFailure));
            assertTrue(output.toString().contains(
                           FailingObjectStorageCopy.TEST_ERROR_MSG));
        } finally {
            System.setErr(originalSysErr);
        }
    }
private String checkAdminJDBDirectory() {
boolean isJDBFilesExist = false;
String adminDirCompletePath = null;
File[] outputDirFiles = outputAdminDir.listFiles();
for (File outputFile : outputDirFiles) {
if (outputFile.isDirectory() &&
outputFile.toString().contains("admin")) {
File [] adminjdbFiles = outputFile.listFiles();
for (File adminjdbFile : adminjdbFiles) {
if (adminjdbFile.toString().contains("jdb")) {
isJDBFilesExist = true;
adminDirCompletePath = outputFile.toString();
break;
}
}
}
}
assertTrue(isJDBFilesExist);
return adminDirCompletePath;
}
private void checkOutputDirectory(File outputDir, String method)
throws Exception {
File[] outputDirFiles = outputDir.listFiles();
for (File outputFile : outputDirFiles) {
if (method.equals("artrequiredfiles") &&
outputFile.getName().equals("ans.zip") ||
method.equals("recoverconfig") &&
outputFile.getName().equals("recoverconfig.zip")) {
AdminRecoverTest.unzipDirectory(outputFile);
}
}
}
private void writeProps(Properties props)
throws IOException, FileNotFoundException {
try (OutputStream configStream =
new FileOutputStream(inputConfigFile)) {
props.store(configStream, "Copy config file");
}
}
    /**
     * Format the date in human readable format. Synchronizes on the shared
     * formatter because SimpleDateFormat is not thread-safe.
     */
    static String formatTime(final long millis) {
        synchronized (dateFormat) {
            return dateFormat.format(new Date(millis));
        }
    }
}
|
apache/jclouds | 36,212 | apis/ec2/src/main/java/org/jclouds/ec2/features/InstanceApi.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.ec2.features;
import static org.jclouds.aws.reference.FormParameters.ACTION;
import java.util.Map;
import java.util.Set;
import jakarta.inject.Named;
import jakarta.ws.rs.FormParam;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.Path;
import org.jclouds.Fallbacks.EmptySetOnNotFoundOr404;
import org.jclouds.aws.filters.FormSigner;
import org.jclouds.ec2.binders.BindBlockDeviceMappingToIndexedFormParams;
import org.jclouds.ec2.binders.BindFiltersToIndexedFormParams;
import org.jclouds.ec2.binders.BindInstanceIdsToIndexedFormParams;
import org.jclouds.ec2.binders.IfNotNullBindAvailabilityZoneToFormParam;
import org.jclouds.ec2.domain.BlockDevice;
import org.jclouds.ec2.domain.InstanceStateChange;
import org.jclouds.ec2.domain.Reservation;
import org.jclouds.ec2.domain.RunningInstance;
import org.jclouds.ec2.domain.Volume.InstanceInitiatedShutdownBehavior;
import org.jclouds.ec2.functions.ConvertUnencodedBytesToBase64EncodedString;
import org.jclouds.ec2.options.RunInstancesOptions;
import org.jclouds.ec2.xml.BlockDeviceMappingHandler;
import org.jclouds.ec2.xml.BooleanValueHandler;
import org.jclouds.ec2.xml.DescribeInstancesResponseHandler;
import org.jclouds.ec2.xml.GetConsoleOutputResponseHandler;
import org.jclouds.ec2.xml.InstanceInitiatedShutdownBehaviorHandler;
import org.jclouds.ec2.xml.InstanceStateChangeHandler;
import org.jclouds.ec2.xml.InstanceTypeHandler;
import org.jclouds.ec2.xml.RunInstancesResponseHandler;
import org.jclouds.ec2.xml.StringValueHandler;
import org.jclouds.ec2.xml.UnencodeStringValueHandler;
import org.jclouds.javax.annotation.Nullable;
import org.jclouds.location.functions.RegionToEndpointOrProviderIfNull;
import org.jclouds.rest.annotations.BinderParam;
import org.jclouds.rest.annotations.EndpointParam;
import org.jclouds.rest.annotations.Fallback;
import org.jclouds.rest.annotations.FormParams;
import org.jclouds.rest.annotations.ParamParser;
import org.jclouds.rest.annotations.RequestFilters;
import org.jclouds.rest.annotations.SinceApiVersion;
import org.jclouds.rest.annotations.VirtualHost;
import org.jclouds.rest.annotations.XMLResponseParser;
import com.google.common.collect.Multimap;
/**
 * Provides access to EC2 Instance Services via their REST API.
 * <p/>
 */
@RequestFilters(FormSigner.class)
@VirtualHost
public interface InstanceApi {
/**
 * Returns information about instances that you own.
 * <p/>
 *
 * If you specify one or more instance IDs, Amazon EC2 returns information
 * for those instances. If you do not specify instance IDs, Amazon EC2
 * returns information for all relevant instances. If you specify an invalid
 * instance ID, a fault is returned. If you specify an instance that you do
 * not own, it will not be included in the returned results.
 * <p/>
 * Recently terminated instances might appear in the returned results.This
 * interval is usually less than one hour.
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 *
 * @see #runInstancesInRegion
 * @see #terminateInstancesInRegion
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeInstances.html"
 *      />
 */
@Named("DescribeInstances")
@POST
@Path("/")
@FormParams(keys = ACTION, values = "DescribeInstances")
@XMLResponseParser(DescribeInstancesResponseHandler.class)
@Fallback(EmptySetOnNotFoundOr404.class)
Set<? extends Reservation<? extends RunningInstance>> describeInstancesInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@BinderParam(BindInstanceIdsToIndexedFormParams.class) String... instanceIds);
/**
 * Returns information about instances that you own.
 * <p/>
 *
 * If you specify one or more filters, Amazon EC2 returns information for instances
 * matching those filters. If you do not specify any filters, Amazon EC2
 * returns information for all relevant instances. If you specify an invalid
 * filter, a fault is returned. Only instances you own will be included in the
 * results.
 * <p/>
 * Recently terminated instances might appear in the returned results. This
 * interval is usually less than one hour.
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param filter
 *
 * @see #runInstancesInRegion
 * @see #terminateInstancesInRegion
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeInstances.html"
 *      />
 */
@SinceApiVersion("2010-08-31")
@Named("DescribeInstances")
@POST
@Path("/")
@FormParams(keys = ACTION, values = "DescribeInstances")
@XMLResponseParser(DescribeInstancesResponseHandler.class)
@Fallback(EmptySetOnNotFoundOr404.class)
Set<? extends Reservation<? extends RunningInstance>> describeInstancesInRegionWithFilter(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@BinderParam(BindFiltersToIndexedFormParams.class) Multimap<String, String> filter);
/**
 * Launches a specified number of instances of an AMI for which you have
 * permissions.
 * <p/>
 *
 * If Amazon EC2 cannot launch the minimum number of AMIs you request, no
 * instances will be launched. If there is insufficient capacity to launch
 * the maximum number of AMIs you request, Amazon EC2 launches the minimum
 * number specified for each AMI and allocate the remaining available
 * instances using round robin.
 * <p/>
 * <h4>Security Groups</h4>
 * <b>Note:</b> Every instance is launched in a security group (created using
 * the CreateSecurityGroup operation).
 * <h4>Key Pair</h4>
 * You can provide an optional key pair ID for each image in the launch
 * request (created using the CreateKeyPair operation). All instances that
 * are created from images that use this key pair will have access to the
 * associated public key at boot. You can use this key to provide secure
 * access to an instance of an image on a per-instance basis. Amazon EC2
 * public images use this feature to provide secure access without passwords.
 * <p/>
 * <b>Note:</b> Launching public images without a key pair ID will leave them
 * inaccessible.
 * <p/>
 * The public key material is made available to the instance at boot time by
 * placing it in the openssh_id.pub file on a logical device that is exposed
 * to the instance as /dev/sda2 (the instance store). The format of this file
 * is suitable for use as an entry within ~/.ssh/authorized_keys (the OpenSSH
 * format). This can be done at boot (e.g., as part of rc.local) allowing for
 * secure access without passwords.
 * <h4>User Data</h4>
 * Optional user data can be provided in the launch request. All instances
 * that collectively comprise the launch request have access to this data.
 * For more information, go the Amazon Elastic Compute Cloud Developer Guide.
 * <h4>Product Codes</h4>
 *
 * <b>Note:</b> If any of the AMIs have a product code attached for which the
 * user has not subscribed, the RunInstances call will fail.
 * <h4>Kernel</h4>
 *
 * <b>Important:</b> We strongly recommend using the 2.6.18 Xen stock kernel
 * with High-CPU and High-Memory instances. Although the default Amazon EC2
 * kernels will work, the new kernels provide greater stability and
 * performance for these instance types. For more information about kernels,
 * go the Amazon Elastic Compute Cloud Developer Guide.
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param nullableAvailabilityZone
 *           Specifies the placement constraints (Availability Zones) for
 *           launching the instances. If null, Amazon will determine the best
 *           availability zone to place the instance.
 * @param imageId
 *           Unique ID of a machine image, returned by a call to
 * @param minCount
 *           Minimum number of instances to launch. If the value is more than
 *           Amazon EC2 can launch, no instances are launched at all.
 *           Constraints: Between 1 and the maximum number allowed for your
 *           account (default: 20).
 * @param maxCount
 *           Maximum number of instances to launch. If the value is more than
 *           Amazon EC2 can launch, the largest possible number above
 *           minCount will be launched instead. Constraints: Between 1 and
 *           the maximum number allowed for your account (default: 20).
 * @see #describeInstancesInRegion
 * @see #terminateInstancesInRegion
 * @see #authorizeSecurityGroupIngressInRegion
 * @see #revokeSecurityGroupIngressInRegion
 * @see #describeSecurityGroupsInRegion
 * @see #createSecurityGroupInRegion
 * @see #createKeyPairInRegion
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-RunInstances.html"
 *      />
 * @see RunInstancesOptions
 */
@Named("RunInstances")
@POST
@Path("/")
@FormParams(keys = ACTION, values = "RunInstances")
@XMLResponseParser(RunInstancesResponseHandler.class)
Reservation<? extends RunningInstance> runInstancesInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@Nullable @BinderParam(IfNotNullBindAvailabilityZoneToFormParam.class) String nullableAvailabilityZone,
@FormParam("ImageId") String imageId, @FormParam("MinCount") int minCount,
@FormParam("MaxCount") int maxCount, RunInstancesOptions... options);
/**
 * Requests a reboot of one or more instances. This operation is
 * asynchronous; it only queues a request to reboot the specified
 * instance(s). The operation will succeed if the instances are valid and
 * belong to you. Requests to reboot terminated instances are ignored. <h3>
 * Note</h3> If a Linux/UNIX instance does not cleanly shut down within four
 * minutes, Amazon EC2 will perform a hard reboot.
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 *
 * @param instanceIds
 *           Instance ID to reboot.
 *
 * @see #startInstancesInRegion
 * @see #runInstancesInRegion
 * @see #describeInstancesInRegion
 * @see #terminateInstancesInRegion
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-RebootInstances.html"
 *      />
 */
@Named("RebootInstances")
@POST
@Path("/")
@FormParams(keys = ACTION, values = "RebootInstances")
void rebootInstancesInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@BinderParam(BindInstanceIdsToIndexedFormParams.class) String... instanceIds);
/**
 * Shuts down one or more instances. This operation is idempotent; if you
 * terminate an instance more than once, each call will succeed.
 * <p/>
 * Terminated instances will remain visible after termination (approximately
 * one hour).
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceIds
 *           Instance ID to terminate.
 * @see #describeInstancesInRegion
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-TerminateInstances.html"
 *      />
 */
@Named("TerminateInstances")
@POST
@Path("/")
@FormParams(keys = ACTION, values = "TerminateInstances")
@XMLResponseParser(InstanceStateChangeHandler.class)
@Fallback(EmptySetOnNotFoundOr404.class)
Set<? extends InstanceStateChange> terminateInstancesInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@BinderParam(BindInstanceIdsToIndexedFormParams.class) String... instanceIds);
/**
 * Stops an instance that uses an Amazon EBS volume as its root device.
 * <p/>
 * Instances that use Amazon EBS volumes as their root devices can be quickly
 * stopped and started. When an instance is stopped, the compute resources
 * are released and you are not billed for hourly instance usage. However,
 * your root partition Amazon EBS volume remains, continues to persist your
 * data, and you are charged for Amazon EBS volume usage. You can restart
 * your instance at any time.
 * <h3>Note</h3>
 * Before stopping an instance, make sure it is in a state from which it can
 * be restarted. Stopping an instance does not preserve data stored in RAM.
 * <p/>
 * Performing this operation on an instance that uses an instance store as
 * its root device returns an error.
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param force
 *           Forces the instance to stop. The instance will not have an
 *           opportunity to flush file system caches nor file system meta
 *           data. If you use this option, you must perform file system check
 *           and repair procedures. This option is not recommended for
 *           Windows instances.
 * @param instanceIds
 *           Instance ID to stop.
 *
 * @see #startInstancesInRegion
 * @see #runInstancesInRegion
 * @see #describeInstancesInRegion
 * @see #terminateInstancesInRegion
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-StopInstances.html"
 *      />
 */
@Named("StopInstances")
@POST
@Path("/")
@FormParams(keys = ACTION, values = "StopInstances")
@XMLResponseParser(InstanceStateChangeHandler.class)
Set<? extends InstanceStateChange> stopInstancesInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("Force") boolean force,
@BinderParam(BindInstanceIdsToIndexedFormParams.class) String... instanceIds);
/**
 * Starts an instance that uses an Amazon EBS volume as its root device.
 * <p/>
 * Instances that use Amazon EBS volumes as their root devices can be quickly
 * stopped and started. When an instance is stopped, the compute resources
 * are released and you are not billed for hourly instance usage. However,
 * your root partition Amazon EBS volume remains, continues to persist your
 * data, and you are charged for Amazon EBS volume usage. You can restart
 * your instance at any time.
 * <h3>Note</h3>
 * Before stopping an instance, make sure it is in a state from which it can
 * be restarted. Stopping an instance does not preserve data stored in RAM.
 * <p/>
 * Performing this operation on an instance that uses an instance store as
 * its root device returns an error.
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceIds
 *           Instance ID to start.
 *
 * @see #stopInstancesInRegion
 * @see #runInstancesInRegion
 * @see #describeInstancesInRegion
 * @see #terminateInstancesInRegion
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-StartInstances.html"
 *      />
 */
@Named("StartInstances")
@POST
@Path("/")
@FormParams(keys = ACTION, values = "StartInstances")
@XMLResponseParser(InstanceStateChangeHandler.class)
Set<? extends InstanceStateChange> startInstancesInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@BinderParam(BindInstanceIdsToIndexedFormParams.class) String... instanceIds);
/**
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to describe the attribute of
 * @return unencoded user data
 */
@Named("DescribeInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "DescribeInstanceAttribute", "userData" })
@XMLResponseParser(UnencodeStringValueHandler.class)
String getUserDataForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId);
/**
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to describe the attribute of
 * @return The root device name (e.g., /dev/sda1).
 */
@Named("DescribeInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "DescribeInstanceAttribute", "rootDeviceName" })
@XMLResponseParser(StringValueHandler.class)
String getRootDeviceNameForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId);
/**
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to describe the attribute of
 * @return the ID of the RAM disk associated with the AMI.
 */
@Named("DescribeInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "DescribeInstanceAttribute", "ramdisk" })
@XMLResponseParser(StringValueHandler.class)
String getRamdiskForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId);
/**
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to describe the attribute of
 * @return the ID of the kernel associated with the AMI.
 */
@Named("DescribeInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "DescribeInstanceAttribute", "kernel" })
@XMLResponseParser(StringValueHandler.class)
String getKernelForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId);
/**
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to describe the attribute of
 * @return Specifies whether the instance can be terminated using the APIs.
 *         You must modify this attribute before you can terminate any
 *         "locked" instances from the APIs.
 */
@Named("DescribeInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "DescribeInstanceAttribute", "disableApiTermination" })
@XMLResponseParser(BooleanValueHandler.class)
boolean isApiTerminationDisabledForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId);
/**
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to describe the attribute of
 * @return The instance type of the instance.
 */
@Named("DescribeInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "DescribeInstanceAttribute", "instanceType" })
@XMLResponseParser(InstanceTypeHandler.class)
String getInstanceTypeForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId);
/**
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to describe the attribute of
 * @return whether the instance's Amazon EBS volumes are stopped or
 *         terminated when the instance is shut down.
 */
@Named("DescribeInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "DescribeInstanceAttribute",
"instanceInitiatedShutdownBehavior" })
@XMLResponseParser(InstanceInitiatedShutdownBehaviorHandler.class)
InstanceInitiatedShutdownBehavior getInstanceInitiatedShutdownBehaviorForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId);
/**
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to describe the attribute of
 * @return Describes the mapping that defines native device names to use when
 *         exposing virtual devices.
 */
@Named("DescribeInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "DescribeInstanceAttribute", "blockDeviceMapping" })
@XMLResponseParser(BlockDeviceMappingHandler.class)
Map<String, BlockDevice> getBlockDeviceMappingForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId);
/**
 * Resets an attribute of an instance to its default value.
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to reset the attribute of
 * @return the ID of the RAM disk associated with the AMI.
 */
@Named("ResetInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "ResetInstanceAttribute", "ramdisk" })
void resetRamdiskForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId);
/**
 * Resets an attribute of an instance to its default value.
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to reset the attribute of
 * @return the ID of the kernel associated with the AMI.
 */
@Named("ResetInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "ResetInstanceAttribute", "kernel" })
void resetKernelForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId);
/**
 * Sets the userData used for starting the instance.
 * <p/>
 * The instance needs to be in a {@link InstanceState#STOPPED} state, which
 * implies two things:
 * <ol>
 * <li>The instance was launched from an EBS-backed AMI so that it can stop</li>
 * <li>You have stopped and waited for the instance to transition from
 * {@link InstanceState#STOPPING} to {@link InstanceState#STOPPED}</li>
 * </ol>
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to change the attribute of
 * @param unencodedData
 *           unencoded data to set as userData
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-ModifyInstanceAttribute.html"
 *      />
 */
@Named("ModifyInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "ModifyInstanceAttribute", "userData" })
void setUserDataForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId,
@FormParam("Value") @ParamParser(ConvertUnencodedBytesToBase64EncodedString.class) byte[] unencodedData);
/**
 * Sets the ramdisk used for starting the instance.
 * <p/>
 * The instance needs to be in a {@link InstanceState#STOPPED} state, which
 * implies two things:
 * <ol>
 * <li>The instance was launched from an EBS-backed AMI so that it can stop</li>
 * <li>You have stopped and waited for the instance to transition from
 * {@link InstanceState#STOPPING} to {@link InstanceState#STOPPED}</li>
 * </ol>
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to change the attribute of
 * @param ramdisk
 *           ramdisk used to start the instance
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-ModifyInstanceAttribute.html"
 *      />
 */
@Named("ModifyInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "ModifyInstanceAttribute", "ramdisk" })
void setRamdiskForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId, @FormParam("Value") String ramdisk);
/**
 * Sets the kernelId used for starting the instance.
 * <p/>
 * The instance needs to be in a {@link InstanceState#STOPPED} state, which
 * implies two things:
 * <ol>
 * <li>The instance was launched from an EBS-backed AMI so that it can stop</li>
 * <li>You have stopped and waited for the instance to transition from
 * {@link InstanceState#STOPPING} to {@link InstanceState#STOPPED}</li>
 * </ol>
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to change the attribute of
 * @param kernel
 *           kernelId used to start the instance
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-ModifyInstanceAttribute.html"
 *      />
 */
@Named("ModifyInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "ModifyInstanceAttribute", "kernel" })
void setKernelForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId, @FormParam("Value") String kernel);
/**
 * This command works while the instance is running and controls whether or
 * not the api can be used to terminate the instance.
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to reset the attribute of
 * @param apiTerminationDisabled
 *           true to disable api termination
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-ModifyInstanceAttribute.html"
 *      />
 */
@Named("ModifyInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "ModifyInstanceAttribute", "disableApiTermination" })
void setApiTerminationDisabledForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId, @FormParam("Value") boolean apiTerminationDisabled);
/**
 * Sets the instanceType used for starting the instance.
 * <p/>
 * The instance needs to be in a {@link InstanceState#STOPPED} state, which
 * implies two things:
 * <ol>
 * <li>The instance was launched from an EBS-backed AMI so that it can stop</li>
 * <li>You have stopped and waited for the instance to transition from
 * {@link InstanceState#STOPPING} to {@link InstanceState#STOPPED}</li>
 * </ol>
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to change the attribute of
 * @param instanceType
 *           instanceType used to start the instance
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-ModifyInstanceAttribute.html"
 *      />
 */
@Named("ModifyInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "ModifyInstanceAttribute", "instanceType" })
void setInstanceTypeForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId, @FormParam("Value") String instanceType);
/**
 * Specifies whether the instance's Amazon EBS volumes are stopped or
 * terminated when the instance is shut down.
 * <p/>
 * The instance needs to be in a {@link InstanceState#STOPPED} state, which
 * implies two things:
 * <ol>
 * <li>The instance was launched from an EBS-backed AMI so that it can stop</li>
 * <li>You have stopped and waited for the instance to transition from
 * {@link InstanceState#STOPPING} to {@link InstanceState#STOPPED}</li>
 * </ol>
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to change the attribute of
 * @param instanceInitiatedShutdownBehavior
 *           whether the instance's Amazon EBS volumes are stopped or
 *           terminated when the instance is shut down.
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-ModifyInstanceAttribute.html"
 *      />
 */
@Named("ModifyInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION, "Attribute" }, values = { "ModifyInstanceAttribute",
"instanceInitiatedShutdownBehavior" })
void setInstanceInitiatedShutdownBehaviorForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId,
@FormParam("Value") InstanceInitiatedShutdownBehavior instanceInitiatedShutdownBehavior);
/**
 * Sets the blockDeviceMapping used for an instance.
 * <p/>
 * The instance needs to be in a {@link InstanceState#STOPPED} state, which
 * implies two things:
 * <ol>
 * <li>The instance was launched from an EBS-backed AMI so that it can stop</li>
 * <li>You have stopped and waited for the instance to transition from
 * {@link InstanceState#STOPPING} to {@link InstanceState#STOPPED}</li>
 * </ol>
 *
 * To create the instances of {@link BlockDevice}, the
 * constructor can be used with the following parameters:
 * {@link BlockDevice#EbsBlockDevice(String, String, boolean)}
 * , that are:
 * <ol>
 * <li>Volume id (required), for instance, "vol-blah"</li>
 * <li>Device name (optional), for instance, "/dev/sda1". To find out more
 * about device names, read the next paragraph.</li>
 * <li>Delete on termination flag (optional), which defines whether the
 * volume will be deleted upon instance's termination.</li>
 * </ol>
 * <p/>
 * Note that the device names between Linux and Windows differ. For Linux,
 * ensure that your device name is in the form /dev/sd[a-z] . For example,
 * /dev/sda , /dev/sdb and /dev/sdh are all valid device names.
 * <p/>
 * For Windows, the root device is still referred to as /dev/sda1 . For other
 * devices, ensure that they are in the form /xvd[c-p] . For example, /xvde ,
 * /xvdf and /xvdp are all valid Windows device names.
 * <p/>
 * <b>NOTE</b>: As of now 02/20/2010, this command only works to change the
 * DeleteOnTermination property of the device. The volume must be
 * <i>attached</i> to a stopped instance.
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to change the attribute of
 * @param blockDeviceMapping
 *           blockDeviceMapping used to start the instance
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-ModifyInstanceAttribute.html"
 *      />
 */
@Named("ModifyInstanceAttribute")
@POST
@Path("/")
@FormParams(keys = { ACTION }, values = { "ModifyInstanceAttribute" })
void setBlockDeviceMappingForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId,
@BinderParam(BindBlockDeviceMappingToIndexedFormParams.class) Map<String, BlockDevice> blockDeviceMapping);
/**
 * Retrieves console output for the specified instance.
 *
 * Instance console output is buffered and posted shortly after instance boot, reboot, and termination. Amazon EC2 preserves
 * the most recent 64 KB output which will be available for at least one hour after the most recent post.
 *
 * @param region
 *           Instances are tied to Availability Zones. However, the instance
 *           ID is tied to the Region.
 * @param instanceId
 *           which instance to retrieve console output for
 * @return The console output
 * @see <a href="http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-GetConsoleOutput.html">
 *      ApiReference query GetConsoleOutput</a>
 */
@Named("GetConsoleOutput")
@POST
@Path("/")
@FormParams(keys = { ACTION }, values = { "GetConsoleOutput" })
@XMLResponseParser(GetConsoleOutputResponseHandler.class)
String getConsoleOutputForInstanceInRegion(
@EndpointParam(parser = RegionToEndpointOrProviderIfNull.class) @Nullable String region,
@FormParam("InstanceId") String instanceId);
}
|
apache/solr | 36,237 | solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.component;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.FlagsAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.ExitableDirectoryReader;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.spell.SuggestMode;
import org.apache.lucene.search.spell.SuggestWord;
import org.apache.solr.client.solrj.response.SpellCheckResponse;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.ShardParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.SpellingParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrEventListener;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.QParser;
import org.apache.solr.search.QueryLimits;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SyntaxError;
import org.apache.solr.spelling.AbstractLuceneSpellChecker;
import org.apache.solr.spelling.ConjunctionSolrSpellChecker;
import org.apache.solr.spelling.IndexBasedSpellChecker;
import org.apache.solr.spelling.QueryConverter;
import org.apache.solr.spelling.SolrSpellChecker;
import org.apache.solr.spelling.SpellCheckCollation;
import org.apache.solr.spelling.SpellCheckCollator;
import org.apache.solr.spelling.SpellingOptions;
import org.apache.solr.spelling.SpellingQueryConverter;
import org.apache.solr.spelling.SpellingResult;
import org.apache.solr.spelling.Token;
import org.apache.solr.util.SolrResponseUtil;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A SearchComponent implementation which provides support for spell checking and suggestions using
* the Lucene contributed SpellChecker.
*
* <p>Refer to <a href="https://solr.apache.org/guide/solr/latest/query-guide/spell-checking.html">
* https://solr.apache.org/guide/solr/latest/query-guide/spell-checking.html</a> for more details
*
* @since solr 1.3
*/
public class SpellCheckComponent extends SearchComponent implements SolrCoreAware, SpellingParams {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
  /** Default for {@code spellcheck.onlyMorePopular} when the request does not set it. */
  public static final boolean DEFAULT_ONLY_MORE_POPULAR = false;
  /**
   * Base name for all spell checker query parameters. This name is also used to register this
   * component with SearchHandler.
   */
  public static final String COMPONENT_NAME = "spellcheck";
  /** Component configuration from solrconfig.xml, held until {@link #inform(SolrCore)} runs. */
  protected NamedList<?> initParams;
  /** Key is the dictionary name, value is the SpellChecker registered for that name */
  protected Map<String, SolrSpellChecker> spellCheckers = new ConcurrentHashMap<>();
  /** Converts the request's query string into tokens to check; assigned in {@link #inform}. */
  protected QueryConverter queryConverter;
  /** Captures the component configuration so {@link #inform(SolrCore)} can build the checkers. */
  @Override
  public void init(NamedList<?> args) {
    super.init(args);
    this.initParams = args;
  }
  /**
   * Handles the optional {@code spellcheck.build} and {@code spellcheck.reload} commands before
   * the main query executes. No-op unless {@code spellcheck=true} is set on the request.
   *
   * @throws IOException if building or reloading the spell-check index fails
   */
  @Override
  public void prepare(ResponseBuilder rb) throws IOException {
    SolrParams params = rb.req.getParams();
    if (!params.getBool(COMPONENT_NAME, false)) {
      return;
    }
    QueryLimits queryLimits = QueryLimits.getCurrentLimits();
    // NOTE(review): getSpellChecker may return null for an unknown single dictionary name, in
    // which case build/reload below would NPE; process() handles the null case explicitly — confirm
    SolrSpellChecker spellChecker = getSpellChecker(params);
    if (params.getBool(SPELLCHECK_BUILD, false)) {
      spellChecker.build(rb.req.getCore(), rb.req.getSearcher());
      rb.rsp.add("command", "build");
      // build/reload can be expensive; mark the response partial if query limits were exceeded
      queryLimits.maybeExitWithPartialResults(
          "SpellCheck build " + spellChecker.getDictionaryName());
    } else if (params.getBool(SPELLCHECK_RELOAD, false)) {
      spellChecker.reload(rb.req.getCore(), rb.req.getSearcher());
      rb.rsp.add("command", "reload");
      queryLimits.maybeExitWithPartialResults(
          "SpellCheck reload " + spellChecker.getDictionaryName());
    }
  }
  /**
   * Runs spell checking for the current request and adds a "spellcheck" section to the response.
   *
   * <p>No-op unless {@code spellcheck=true} and at least one checker is configured. The text to
   * check comes from {@code spellcheck.q} (tokenized with the checker's query analyzer) or,
   * failing that, from the request's query string via the configured {@link QueryConverter}.
   *
   * @throws IOException if the underlying spellchecker fails while generating suggestions
   * @throws SolrException with code 404 if none of the requested dictionaries exist
   */
  @Override
  public void process(ResponseBuilder rb) throws IOException {
    SolrParams params = rb.req.getParams();
    if (!params.getBool(COMPONENT_NAME, false) || spellCheckers.isEmpty()) {
      return;
    }
    boolean shardRequest = "true".equals(params.get(ShardParams.IS_SHARD));
    SolrSpellChecker spellChecker = getSpellChecker(params);
    if (spellChecker != null) {
      Collection<Token> tokens;
      String q = params.get(SPELLCHECK_Q);
      if (q != null) {
        // we have a spell check param, tokenize it with the query analyzer applicable for this
        // spellchecker
        tokens = getTokens(q, spellChecker.getQueryAnalyzer());
      } else {
        q = rb.getQueryString();
        if (q == null) {
          q = params.get(CommonParams.Q);
        }
        tokens = queryConverter.convert(q);
      }
      if (tokens != null && tokens.isEmpty() == false) {
        int count = params.getInt(SPELLCHECK_COUNT, 1);
        boolean onlyMorePopular =
            params.getBool(SPELLCHECK_ONLY_MORE_POPULAR, DEFAULT_ONLY_MORE_POPULAR);
        boolean extendedResults = params.getBool(SPELLCHECK_EXTENDED_RESULTS, false);
        boolean collate = params.getBool(SPELLCHECK_COLLATE, false);
        float accuracy = params.getFloat(SPELLCHECK_ACCURACY, Float.MIN_VALUE);
        int alternativeTermCount =
            params.getInt(SpellingParams.SPELLCHECK_ALTERNATIVE_TERM_COUNT, 0);
        // If specified, this can be a discrete # of results, or a percentage of fq results.
        Integer maxResultsForSuggest = maxResultsForSuggest(rb);
        ModifiableSolrParams customParams = new ModifiableSolrParams();
        for (String checkerName : getDictionaryNames(params)) {
          customParams.add(getCustomParams(checkerName, params));
        }
        // hit count: prefer the value already placed in the response log by an earlier component
        // NOTE(review): "hits" is presumably logged by the query component — confirm
        Number hitsLong = (Number) rb.rsp.getToLog().get("hits");
        long hits = 0;
        if (hitsLong == null) {
          hits = rb.getNumberDocumentsFound();
        } else {
          hits = hitsLong.longValue();
        }
        QueryLimits queryLimits = QueryLimits.getCurrentLimits();
        SpellingResult spellingResult = null;
        // only compute suggestions when the query matched few enough documents
        if (maxResultsForSuggest == null || hits <= maxResultsForSuggest) {
          SuggestMode suggestMode = SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
          if (onlyMorePopular) {
            suggestMode = SuggestMode.SUGGEST_MORE_POPULAR;
          } else if (alternativeTermCount > 0) {
            suggestMode = SuggestMode.SUGGEST_ALWAYS;
          }
          DirectoryReader reader = rb.req.getSearcher().getIndexReader();
          if (queryLimits.isLimitsEnabled()) {
            // wrap the reader so long-running suggestion lookups honor the query time limits
            reader = ExitableDirectoryReader.wrap(reader, queryLimits);
          }
          SpellingOptions options =
              new SpellingOptions(
                  tokens,
                  reader,
                  count,
                  alternativeTermCount,
                  suggestMode,
                  extendedResults,
                  accuracy,
                  customParams);
          spellingResult = spellChecker.getSuggestions(options);
        } else {
          spellingResult = new SpellingResult();
        }
        if (queryLimits.maybeExitWithPartialResults("SpellCheck getSuggestions")) {
          return;
        }
        boolean isCorrectlySpelled =
            hits > (maxResultsForSuggest == null ? 0 : maxResultsForSuggest);
        NamedList<Object> response = new SimpleOrderedMap<>();
        NamedList<?> suggestions = toNamedList(shardRequest, spellingResult, q, extendedResults);
        response.add("suggestions", suggestions);
        if (extendedResults) {
          response.add("correctlySpelled", isCorrectlySpelled);
        }
        if (collate) {
          addCollationsToResponse(
              params, spellingResult, rb, q, response, spellChecker.isSuggestionsMayOverlap());
        }
        if (shardRequest) {
          // include the original terms so the merging node can tell them apart from suggestions
          addOriginalTermsToResponse(response, tokens);
        }
        rb.rsp.add("spellcheck", response);
      }
    } else {
      throw new SolrException(
          SolrException.ErrorCode.NOT_FOUND,
          "Specified dictionaries do not exist: "
              + getDictionaryNameAsSingleString(getDictionaryNames(params)));
    }
  }
  /**
   * Resolves {@code spellcheck.maxResultsForSuggest} to an absolute hit-count threshold.
   *
   * <p>A whole-number value is used directly. A fractional value is treated as a percentage of
   * the result count of {@code spellcheck.maxResultsForSuggest.fq} (or, when that is absent, of
   * the most restrictive {@code fq} filter on the request).
   *
   * @return the absolute threshold, or null when the parameter is unset or the filter query
   *     could not be evaluated
   */
  private Integer maxResultsForSuggest(ResponseBuilder rb) {
    SolrParams params = rb.req.getParams();
    float maxResultsForSuggestParamValue =
        params.getFloat(SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, 0.0f);
    Integer maxResultsForSuggest = null;
    if (maxResultsForSuggestParamValue > 0.0f) {
      if (maxResultsForSuggestParamValue == (int) maxResultsForSuggestParamValue) {
        // If a whole number was passed in, this is a discrete number of documents
        maxResultsForSuggest = (int) maxResultsForSuggestParamValue;
      } else {
        // If a fractional value was passed in, this is the % of documents returned by the specified
        // filter. If no specified filter, we use the most restrictive filter of the fq parameters
        String maxResultsFilterQueryString =
            params.get(SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST_FQ);
        int maxResultsByFilters = Integer.MAX_VALUE;
        SolrIndexSearcher searcher = rb.req.getSearcher();
        try {
          if (maxResultsFilterQueryString != null) {
            // Get the default Lucene query parser
            QParser parser = QParser.getParser(maxResultsFilterQueryString, rb.req);
            DocSet s = searcher.getDocSet(parser.getQuery());
            maxResultsByFilters = s.size();
          } else {
            List<Query> filters = rb.getFilters();
            // Get the maximum possible hits within these filters (size of most restrictive filter).
            if (filters != null) {
              for (Query query : filters) {
                DocSet s = searcher.getDocSet(query);
                if (s != null) {
                  maxResultsByFilters = Math.min(s.size(), maxResultsByFilters);
                }
              }
            }
          }
        } catch (IOException | SyntaxError e) {
          // an unevaluable filter disables the threshold instead of failing the request
          log.error("Error", e);
          return null;
        }
        // Recalculate maxResultsForSuggest if filters were specified
        if (maxResultsByFilters != Integer.MAX_VALUE) {
          maxResultsForSuggest = Math.round(maxResultsByFilters * maxResultsForSuggestParamValue);
        }
      }
    }
    return maxResultsForSuggest;
  }
  /**
   * Builds whole-query collations from the per-word corrections and adds them to the response
   * under "collations".
   *
   * @param params request parameters controlling collation counts and tries
   * @param spellingResult per-term corrections to combine into collations
   * @param rb the current response builder (used by the collator to test candidate collations)
   * @param q the original query string being corrected
   * @param response the "spellcheck" response section the collations are added to
   * @param suggestionsMayOverlap whether the checker may produce overlapping token corrections
   */
  protected void addCollationsToResponse(
      SolrParams params,
      SpellingResult spellingResult,
      ResponseBuilder rb,
      String q,
      NamedList<Object> response,
      boolean suggestionsMayOverlap) {
    int maxCollations = params.getInt(SPELLCHECK_MAX_COLLATIONS, 1);
    int maxCollationTries = params.getInt(SPELLCHECK_MAX_COLLATION_TRIES, 0);
    int maxCollationEvaluations = params.getInt(SPELLCHECK_MAX_COLLATION_EVALUATIONS, 10000);
    boolean collationExtendedResults = params.getBool(SPELLCHECK_COLLATE_EXTENDED_RESULTS, false);
    int maxCollationCollectDocs = params.getInt(SPELLCHECK_COLLATE_MAX_COLLECT_DOCS, 0);
    // If not reporting hits counts, don't bother collecting more than 1 document per try.
    if (!collationExtendedResults) {
      maxCollationCollectDocs = 1;
    }
    boolean shard = params.getBool(ShardParams.IS_SHARD, false);
    SpellCheckCollator collator =
        new SpellCheckCollator()
            .setMaxCollations(maxCollations)
            .setMaxCollationTries(maxCollationTries)
            .setMaxCollationEvaluations(maxCollationEvaluations)
            .setSuggestionsMayOverlap(suggestionsMayOverlap)
            .setDocCollectionLimit(maxCollationCollectDocs);
    List<SpellCheckCollation> collations = collator.collate(spellingResult, q, rb);
    // by sorting here we guarantee a non-distributed request returns all
    // results in the same order as a distributed request would,
    // even in cases when the internal rank is the same.
    Collections.sort(collations);
    NamedList<Object> collationList = new NamedList<>();
    for (SpellCheckCollation collation : collations) {
      if (collationExtendedResults) {
        NamedList<Object> extendedResult = new SimpleOrderedMap<>();
        extendedResult.add("collationQuery", collation.getCollationQuery());
        extendedResult.add("hits", collation.getHits());
        extendedResult.add("misspellingsAndCorrections", collation.getMisspellingsAndCorrections());
        if (maxCollationTries > 0 && shard) {
          // internal rank lets the merging node order collations coming from different shards
          extendedResult.add("collationInternalRank", collation.getInternalRank());
        }
        collationList.add("collation", extendedResult);
      } else {
        collationList.add("collation", collation.getCollationQuery());
        if (maxCollationTries > 0 && shard) {
          collationList.add("collationInternalRank", collation.getInternalRank());
        }
      }
    }
    response.add("collations", collationList);
  }
private void addOriginalTermsToResponse(
NamedList<Object> response, Collection<Token> originalTerms) {
List<String> originalTermStr = new ArrayList<String>();
for (Token t : originalTerms) {
originalTermStr.add(t.toString());
}
response.add("originalTerms", originalTermStr);
}
/**
* For every param that is of the form "spellcheck.[dictionary name].XXXX=YYYY, add XXXX=YYYY as a
* param to the custom param list
*
* @param params The original SolrParams
* @return The new Params
*/
protected SolrParams getCustomParams(String dictionary, SolrParams params) {
ModifiableSolrParams result = new ModifiableSolrParams();
Iterator<String> iter = params.getParameterNamesIterator();
String prefix = SpellingParams.SPELLCHECK_PREFIX + dictionary + ".";
while (iter.hasNext()) {
String nxt = iter.next();
if (nxt.startsWith(prefix)) {
result.add(nxt.substring(prefix.length()), params.getParams(nxt));
}
}
return result;
}
  /**
   * Adjusts outgoing shard requests: spellcheck piggybacks only on the main query phase (top
   * ids, or top groups when grouping), where each shard is asked for at least 5 suggestions per
   * term; for every other phase it is explicitly switched off.
   */
  @Override
  public void modifyRequest(ResponseBuilder rb, SearchComponent who, ShardRequest sreq) {
    SolrParams params = rb.req.getParams();
    if (!params.getBool(COMPONENT_NAME, false)) return;
    int purpose =
        rb.grouping() ? ShardRequest.PURPOSE_GET_TOP_GROUPS : ShardRequest.PURPOSE_GET_TOP_IDS;
    if ((sreq.purpose & purpose) != 0) {
      // let the checker add anything it needs to the shard request
      getSpellChecker(params).modifyRequest(rb, sreq);
      // fetch at least 5 suggestions from each shard
      int count = sreq.params.getInt(SPELLCHECK_COUNT, 1);
      if (count < 5) count = 5;
      sreq.params.set(SPELLCHECK_COUNT, count);
      sreq.params.set("spellcheck", "true");
    } else {
      sreq.params.set("spellcheck", "false");
    }
  }
  /**
   * Distributed-search merge step: once all shards have returned their field data, merges the
   * per-shard suggestions and collations into a single "spellcheck" section on the response.
   *
   * <p>Shard responses are only consulted when a suggestion is actually wanted — i.e. no
   * maxResultsForSuggest threshold is set, or the query is not "correctly spelled" by that
   * measure.
   */
  @Override
  public void finishStage(ResponseBuilder rb) {
    SolrParams params = rb.req.getParams();
    if (!params.getBool(COMPONENT_NAME, false) || rb.getStage() != ResponseBuilder.STAGE_GET_FIELDS)
      return;
    boolean extendedResults = params.getBool(SPELLCHECK_EXTENDED_RESULTS, false);
    boolean collate = params.getBool(SPELLCHECK_COLLATE, false);
    boolean collationExtendedResults = params.getBool(SPELLCHECK_COLLATE_EXTENDED_RESULTS, false);
    int maxCollationTries = params.getInt(SPELLCHECK_MAX_COLLATION_TRIES, 0);
    int maxCollations = params.getInt(SPELLCHECK_MAX_COLLATIONS, 1);
    Integer maxResultsForSuggest = maxResultsForSuggest(rb);
    int count = rb.req.getParams().getInt(SPELLCHECK_COUNT, 1);
    // merge at least the checker's default number of suggestions per term
    int numSug = Math.max(count, AbstractLuceneSpellChecker.DEFAULT_SUGGESTION_COUNT);
    // recover the original query text the same way process() derived it
    String origQuery = params.get(SPELLCHECK_Q);
    if (origQuery == null) {
      origQuery = rb.getQueryString();
      if (origQuery == null) {
        origQuery = params.get(CommonParams.Q);
      }
    }
    long hits = rb.grouping() ? rb.totalHitCount : rb.getNumberDocumentsFound();
    boolean isCorrectlySpelled = hits > (maxResultsForSuggest == null ? 0 : maxResultsForSuggest);
    SpellCheckMergeData mergeData = new SpellCheckMergeData();
    if (maxResultsForSuggest == null || !isCorrectlySpelled) {
      for (ShardRequest sreq : rb.finished) {
        for (ShardResponse srsp : sreq.responses) {
          NamedList<?> nl =
              (NamedList<?>)
                  SolrResponseUtil.getSubsectionFromShardResponse(rb, srsp, "spellcheck", true);
          if (log.isDebugEnabled()) {
            log.debug("{} {}", srsp.getShard(), nl);
          }
          if (nl != null) {
            mergeData.totalNumberShardResponses++;
            collectShardSuggestions(nl, mergeData);
            collectShardCollations(mergeData, nl, maxCollationTries);
          }
        }
      }
    }
    // all shard responses have been collected
    // create token and get top suggestions
    SolrSpellChecker checker = getSpellChecker(rb.req.getParams());
    SpellingResult result = checker.mergeSuggestions(mergeData, numSug, count, extendedResults);
    NamedList<Object> response = new SimpleOrderedMap<>();
    NamedList<?> suggestions = toNamedList(false, result, origQuery, extendedResults);
    response.add("suggestions", suggestions);
    if (extendedResults) {
      response.add("correctlySpelled", isCorrectlySpelled);
    }
    if (collate) {
      // emit at most maxCollations collations in sorted order
      SpellCheckCollation[] sortedCollations =
          mergeData.collations.values().toArray(new SpellCheckCollation[0]);
      Arrays.sort(sortedCollations);
      NamedList<Object> collations = new NamedList<>();
      int i = 0;
      while (i < maxCollations && i < sortedCollations.length) {
        SpellCheckCollation collation = sortedCollations[i];
        i++;
        if (collationExtendedResults) {
          SimpleOrderedMap<Object> extendedResult = new SimpleOrderedMap<>();
          extendedResult.add("collationQuery", collation.getCollationQuery());
          extendedResult.add("hits", collation.getHits());
          extendedResult.add(
              "misspellingsAndCorrections", collation.getMisspellingsAndCorrections());
          collations.add("collation", extendedResult);
        } else {
          collations.add("collation", collation.getCollationQuery());
        }
      }
      response.add("collations", collations);
    }
    rb.rsp.add("spellcheck", response);
  }
@SuppressWarnings("unchecked")
private void collectShardSuggestions(NamedList<?> nl, SpellCheckMergeData mergeData) {
SpellCheckResponse spellCheckResp = new SpellCheckResponse(nl);
Iterable<Object> originalTermStrings = (Iterable<Object>) nl.get("originalTerms");
if (originalTermStrings != null) {
mergeData.originalTerms = new HashSet<>();
for (Object originalTermObj : originalTermStrings) {
mergeData.originalTerms.add(originalTermObj.toString());
}
}
for (SpellCheckResponse.Suggestion suggestion : spellCheckResp.getSuggestions()) {
mergeData.origVsSuggestion.put(suggestion.getToken(), suggestion);
HashSet<String> suggested = mergeData.origVsSuggested.get(suggestion.getToken());
if (suggested == null) {
suggested = new HashSet<>();
mergeData.origVsSuggested.put(suggestion.getToken(), suggested);
}
// sum up original frequency
int origFreq = 0;
Integer o = mergeData.origVsFreq.get(suggestion.getToken());
if (o != null) origFreq += o;
origFreq += suggestion.getOriginalFrequency();
mergeData.origVsFreq.put(suggestion.getToken(), origFreq);
// # shards reporting
Integer origShards = mergeData.origVsShards.get(suggestion.getToken());
if (origShards == null) {
mergeData.origVsShards.put(suggestion.getToken(), 1);
} else {
mergeData.origVsShards.put(suggestion.getToken(), ++origShards);
}
// find best suggestions
for (int i = 0; i < suggestion.getNumFound(); i++) {
String alternative = suggestion.getAlternatives().get(i);
suggested.add(alternative);
SuggestWord sug = mergeData.suggestedVsWord.get(alternative);
if (sug == null) {
sug = new SuggestWord();
mergeData.suggestedVsWord.put(alternative, sug);
}
sug.string = alternative;
// alternative frequency is present only for extendedResults=true
if (suggestion.getAlternativeFrequencies() != null
&& suggestion.getAlternativeFrequencies().size() > 0) {
Integer freq = suggestion.getAlternativeFrequencies().get(i);
if (freq != null) sug.freq += freq;
}
}
}
}
  /**
   * Folds one shard's collation entries into {@code mergeData.collations}, keyed by the
   * collation query string. Hits are summed across shards; internal rank keeps the maximum seen.
   *
   * @param maxCollationTries when > 0, expanded entries carry a "collationInternalRank" value
   */
  private void collectShardCollations(
      SpellCheckMergeData mergeData, NamedList<?> spellCheckResponse, int maxCollationTries) {
    Map<String, SpellCheckCollation> collations = mergeData.collations;
    NamedList<?> collationHolder = (NamedList<?>) spellCheckResponse.get("collations");
    if (collationHolder != null) {
      List<?> collationList = collationHolder.getAll("collation");
      List<?> collationRankList = collationHolder.getAll("collationInternalRank");
      // index into collationRankList; advanced only when a rank is consumed, keeping the two
      // parallel lists aligned
      int i = 0;
      if (collationList != null) {
        for (Object o : collationList) {
          if (o instanceof String) {
            // simple (non-extended) collation: just the rewritten query string
            SpellCheckCollation coll = new SpellCheckCollation();
            coll.setCollationQuery((String) o);
            if (collationRankList != null && collationRankList.size() > 0) {
              coll.setInternalRank((Integer) collationRankList.get(i));
              i++;
            }
            SpellCheckCollation priorColl = collations.get(coll.getCollationQuery());
            if (priorColl != null) {
              // same collation from another shard: keep the best internal rank
              coll.setInternalRank(Math.max(coll.getInternalRank(), priorColl.getInternalRank()));
            }
            collations.put(coll.getCollationQuery(), coll);
          } else {
            // extended collation: a NamedList carrying query, hits and corrections
            NamedList<?> expandedCollation = (NamedList<?>) o;
            SpellCheckCollation coll = new SpellCheckCollation();
            coll.setCollationQuery((String) expandedCollation.get("collationQuery"));
            coll.setHits(((Number) expandedCollation.get("hits")).longValue());
            if (maxCollationTries > 0) {
              coll.setInternalRank((Integer) expandedCollation.get("collationInternalRank"));
            }
            @SuppressWarnings("unchecked")
            NamedList<String> misspellings =
                (NamedList<String>) expandedCollation.get("misspellingsAndCorrections");
            coll.setMisspellingsAndCorrections(misspellings);
            SpellCheckCollation priorColl = collations.get(coll.getCollationQuery());
            if (priorColl != null) {
              // same collation from another shard: accumulate hits, keep the best rank
              coll.setHits(coll.getHits() + priorColl.getHits());
              coll.setInternalRank(Math.max(coll.getInternalRank(), priorColl.getInternalRank()));
            }
            collations.put(coll.getCollationQuery(), coll);
          }
        }
      }
    }
  }
  /**
   * Tokenizes {@code q} with the given analyzer, copying each analysis token's term, offsets,
   * type, flags, payload and position increment into a spellcheck {@link Token}.
   *
   * @throws IOException if the analyzer's token stream fails
   */
  private Collection<Token> getTokens(String q, Analyzer analyzer) throws IOException {
    Collection<Token> result = new ArrayList<>();
    assert analyzer != null;
    try (TokenStream ts = analyzer.tokenStream("", q)) {
      ts.reset();
      // TODO: support custom attributes
      CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
      OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
      TypeAttribute typeAtt = ts.addAttribute(TypeAttribute.class);
      FlagsAttribute flagsAtt = ts.addAttribute(FlagsAttribute.class);
      PayloadAttribute payloadAtt = ts.addAttribute(PayloadAttribute.class);
      PositionIncrementAttribute posIncAtt = ts.addAttribute(PositionIncrementAttribute.class);
      while (ts.incrementToken()) {
        Token token = new Token();
        token.copyBuffer(termAtt.buffer(), 0, termAtt.length());
        token.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
        token.setType(typeAtt.type());
        token.setFlags(flagsAtt.getFlags());
        token.setPayload(payloadAtt.getPayload());
        token.setPositionIncrement(posIncAtt.getPositionIncrement());
        result.add(token);
      }
      ts.end();
      return result;
    }
  }
  /**
   * Resolves the checker(s) named by {@code spellcheck.dictionary}. A single name is looked up
   * directly; multiple names are combined into a {@link ConjunctionSolrSpellChecker} unless a
   * checker is already registered under the space-joined name.
   *
   * @return the resolved checker, or null when a single unknown dictionary was requested
   */
  protected SolrSpellChecker getSpellChecker(SolrParams params) {
    String[] dictName = getDictionaryNames(params);
    if (dictName.length == 1) {
      return spellCheckers.get(dictName[0]);
    } else {
      String singleStr = getDictionaryNameAsSingleString(dictName);
      SolrSpellChecker ssc = spellCheckers.get(singleStr);
      if (ssc == null) {
        // NOTE(review): the conjunction is rebuilt on every request (never cached back into
        // spellCheckers), and an unknown name passes null to addChecker — confirm intended
        ConjunctionSolrSpellChecker cssc = new ConjunctionSolrSpellChecker();
        for (String dn : dictName) {
          cssc.addChecker(spellCheckers.get(dn));
        }
        ssc = cssc;
      }
      return ssc;
    }
  }
private String getDictionaryNameAsSingleString(String[] dictName) {
StringBuilder sb = new StringBuilder();
for (String dn : dictName) {
if (sb.length() > 0) {
sb.append(" ");
}
sb.append(dn);
}
return sb.toString();
}
private String[] getDictionaryNames(SolrParams params) {
String[] dictName = params.getParams(SPELLCHECK_DICT);
if (dictName == null) {
return new String[] {SolrSpellChecker.DEFAULT_DICTIONARY_NAME};
}
return dictName;
}
  /**
   * Looks up a spellchecker by dictionary name.
   *
   * @param name the dictionary name the checker was registered under
   * @return the spellchecker registered to a given name, or null if none is registered
   */
  public SolrSpellChecker getSpellChecker(String name) {
    return spellCheckers.get(name);
  }
protected NamedList<Object> toNamedList(
boolean shardRequest,
SpellingResult spellingResult,
String origQuery,
boolean extendedResults) {
NamedList<Object> result = new NamedList<>();
Map<Token, LinkedHashMap<String, Integer>> suggestions = spellingResult.getSuggestions();
boolean hasFreqInfo = spellingResult.hasTokenFrequencyInfo();
boolean hasSuggestions = false;
boolean hasZeroFrequencyToken = false;
for (Map.Entry<Token, LinkedHashMap<String, Integer>> entry : suggestions.entrySet()) {
Token inputToken = entry.getKey();
String tokenString = new String(inputToken.buffer(), 0, inputToken.length());
Map<String, Integer> theSuggestions = new LinkedHashMap<>(entry.getValue());
theSuggestions.keySet().removeIf(sug -> sug.equals(tokenString));
if (theSuggestions.size() > 0) {
hasSuggestions = true;
}
if (theSuggestions != null && (theSuggestions.size() > 0 || shardRequest)) {
SimpleOrderedMap<Object> suggestionList = new SimpleOrderedMap<>();
suggestionList.add("numFound", theSuggestions.size());
suggestionList.add("startOffset", inputToken.startOffset());
suggestionList.add("endOffset", inputToken.endOffset());
// Logical structure of normal (non-extended) results:
// "suggestion":["alt1","alt2"]
//
// Logical structure of the extended results:
// "suggestion":[
// {"word":"alt1","freq":7},
// {"word":"alt2","freq":4}
// ]
if (extendedResults && hasFreqInfo) {
suggestionList.add("origFreq", spellingResult.getTokenFrequency(inputToken));
ArrayList<SimpleOrderedMap<Object>> sugs = new ArrayList<>();
suggestionList.add("suggestion", sugs);
for (Map.Entry<String, Integer> suggEntry : theSuggestions.entrySet()) {
SimpleOrderedMap<Object> sugEntry = new SimpleOrderedMap<>();
sugEntry.add("word", suggEntry.getKey());
sugEntry.add("freq", suggEntry.getValue());
sugs.add(sugEntry);
}
} else {
suggestionList.add("suggestion", theSuggestions.keySet());
}
if (hasFreqInfo) {
Integer tokenFrequency = spellingResult.getTokenFrequency(inputToken);
if (tokenFrequency == null || tokenFrequency == 0) {
hasZeroFrequencyToken = true;
}
}
result.add(tokenString, suggestionList);
}
}
return result;
}
  /**
   * Completes initialization once the core is available: instantiates every configured
   * "spellchecker" entry, registers index-event listeners for each, and wires a
   * {@link QueryConverter} with the analyzer of the configured {@code queryAnalyzerFieldType}
   * (whitespace analyzer when that field type is missing).
   */
  @Override
  @SuppressWarnings({"unchecked"})
  public void inform(SolrCore core) {
    if (initParams != null) {
      log.debug("Initializing spell checkers");
      boolean hasDefault = false;
      // TODO addSpellChecker response should set hasDefault!
      for (Map.Entry<String, ?> initEntry : initParams) {
        if ("spellchecker".equals(initEntry.getKey())) {
          // the config may arrive as a NamedList, a Map, or a List of Maps depending on how it
          // was declared in solrconfig.xml
          Object cfg = initEntry.getValue();
          if (cfg instanceof NamedList) {
            addSpellChecker(core, hasDefault, (NamedList<?>) cfg);
          } else if (cfg instanceof Map) {
            addSpellChecker(core, hasDefault, new NamedList<>((Map<String, ?>) cfg));
          } else if (cfg instanceof List) {
            for (Object o : (List<?>) cfg) {
              if (o instanceof Map) {
                addSpellChecker(core, hasDefault, new NamedList<>((Map<String, ?>) o));
              }
            }
          }
        }
      }
      Map<String, QueryConverter> queryConverters = new HashMap<>();
      core.initPlugins(queryConverters, QueryConverter.class);
      // ensure that there is at least one query converter defined
      if (queryConverters.size() == 0) {
        log.trace("No queryConverter defined, using default converter");
        queryConverters.put("queryConverter", new SpellingQueryConverter());
      }
      // there should only be one
      // NOTE(review): if more than one converter is registered, queryConverter stays null and
      // process() would NPE on queryConverter.convert(q) — confirm intended
      if (queryConverters.size() == 1) {
        queryConverter = queryConverters.values().iterator().next();
        IndexSchema schema = core.getLatestSchema();
        String fieldTypeName = (String) initParams.get("queryAnalyzerFieldType");
        FieldType fieldType = schema.getFieldTypes().get(fieldTypeName);
        Analyzer analyzer =
            fieldType == null ? new WhitespaceAnalyzer() : fieldType.getQueryAnalyzer();
        // TODO: There's got to be a better way! Where's Spring when you need it?
        queryConverter.setAnalyzer(analyzer);
      }
    }
  }
  /**
   * Instantiates and registers one spellchecker from its configuration entry.
   *
   * <p>A checker whose init returns the default dictionary name — or no name at all — becomes
   * the default checker; a second default is an error. Also registers a firstSearcher listener
   * to load the spell index, and a newSearcher listener when buildOnCommit/buildOnOptimize is
   * configured.
   *
   * @param hasDefault whether a default dictionary has been registered so far
   * @return the updated hasDefault flag. NOTE(review): the return value is ignored by
   *     {@link #inform(SolrCore)} (see the TODO there), so the duplicate-default check does not
   *     accumulate across multiple checker entries — confirm.
   * @throws RuntimeException if the checker class cannot be loaded or a duplicate default is
   *     detected within this call
   */
  private boolean addSpellChecker(SolrCore core, boolean hasDefault, NamedList<?> spellchecker) {
    String className = (String) spellchecker.get("classname");
    if (className == null) className = (String) spellchecker.get("class");
    // TODO: this is a little bit sneaky: warn if class isn't supplied
    // so that it's mandatory in a future release?
    if (className == null) className = IndexBasedSpellChecker.class.getName();
    SolrResourceLoader loader = core.getResourceLoader();
    SolrSpellChecker checker = loader.newInstance(className, SolrSpellChecker.class);
    if (checker != null) {
      String dictionary = checker.init(spellchecker, core);
      if (dictionary != null) {
        boolean isDefault = dictionary.equals(SolrSpellChecker.DEFAULT_DICTIONARY_NAME);
        if (isDefault && !hasDefault) {
          hasDefault = true;
        } else if (isDefault && hasDefault) {
          throw new RuntimeException("More than one dictionary is missing name.");
        }
        spellCheckers.put(dictionary, checker);
      } else {
        // unnamed dictionary: register it as the default
        if (!hasDefault) {
          spellCheckers.put(SolrSpellChecker.DEFAULT_DICTIONARY_NAME, checker);
          hasDefault = true;
        } else {
          throw new RuntimeException("More than one dictionary is missing name.");
        }
      }
      // Register event listeners for this SpellChecker
      core.registerFirstSearcherListener(new SpellCheckerListener(core, checker, false, false));
      boolean buildOnCommit = Boolean.parseBoolean((String) spellchecker.get("buildOnCommit"));
      boolean buildOnOptimize = Boolean.parseBoolean((String) spellchecker.get("buildOnOptimize"));
      if (buildOnCommit || buildOnOptimize) {
        if (log.isInfoEnabled()) {
          log.info(
              "Registering newSearcher listener for spellchecker: {}", checker.getDictionaryName());
        }
        core.registerNewSearcherListener(
            new SpellCheckerListener(core, checker, buildOnCommit, buildOnOptimize));
      }
    } else {
      throw new RuntimeException("Can't load spell checker: " + className);
    }
    return hasDefault;
  }
  /**
   * Searcher event listener that keeps a spellchecker's index in sync with the main index:
   * reloads the spell index on the first searcher, and (when configured) rebuilds it on commit
   * or when the index has been optimized down to a single segment.
   */
  private static class SpellCheckerListener implements SolrEventListener {
    private final SolrCore core;
    private final SolrSpellChecker checker;
    // rebuild the spell index on every new searcher (commit)
    private final boolean buildOnCommit;
    // rebuild only when the index has a single leaf (fully merged/"optimized")
    private final boolean buildOnOptimize;
    public SpellCheckerListener(
        SolrCore core, SolrSpellChecker checker, boolean buildOnCommit, boolean buildOnOptimize) {
      this.core = core;
      this.checker = checker;
      this.buildOnCommit = buildOnCommit;
      this.buildOnOptimize = buildOnOptimize;
    }
    @Override
    public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) {
      if (currentSearcher == null) {
        // firstSearcher event
        try {
          if (log.isInfoEnabled()) {
            log.info("Loading spell index for spellchecker: {}", checker.getDictionaryName());
          }
          checker.reload(core, newSearcher);
        } catch (IOException e) {
          // a reload failure is logged, not rethrown: the core should still come up
          log.error(
              "Exception in reloading spell check index for spellchecker: {}",
              checker.getDictionaryName(),
              e);
        }
      } else {
        // newSearcher event
        if (buildOnCommit) {
          buildSpellIndex(newSearcher);
        } else if (buildOnOptimize) {
          // one leaf means the index is fully merged, i.e. "optimized"
          if (newSearcher.getIndexReader().leaves().size() == 1) {
            buildSpellIndex(newSearcher);
          } else {
            if (log.isInfoEnabled()) {
              log.info(
                  "Index is not optimized therefore skipping building spell check index for: {}",
                  checker.getDictionaryName());
            }
          }
        }
      }
    }
    private void buildSpellIndex(SolrIndexSearcher newSearcher) {
      try {
        if (log.isInfoEnabled()) {
          log.info("Building spell index for spell checker: {}", checker.getDictionaryName());
        }
        checker.build(core, newSearcher);
      } catch (Exception e) {
        // a build failure must not propagate out of the searcher event
        log.error(
            "Exception in building spell check index for spellchecker: {}",
            checker.getDictionaryName(),
            e);
      }
    }
    @Override
    public void postCommit() {}
    @Override
    public void postSoftCommit() {}
  }
  /** Returns an unmodifiable view of all registered spellcheckers, keyed by dictionary name. */
  public Map<String, SolrSpellChecker> getSpellCheckers() {
    return Collections.unmodifiableMap(spellCheckers);
  }
// ///////////////////////////////////////////
// / SolrInfoBean
// //////////////////////////////////////////
  /** Human-readable description reported through the SolrInfoBean interface. */
  @Override
  public String getDescription() {
    return "A Spell Checker component";
  }
  /** SolrInfoBean category for this component. */
  @Override
  public Category getCategory() {
    return Category.SPELLCHECKER;
  }
}
|
googleapis/google-cloud-java | 36,029 | java-grafeas/src/main/java/io/grafeas/v1/SecretStatus.java | /*
* Copyright 2025 The Grafeas Authors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: grafeas/v1/secret.proto
// Protobuf Java Version: 3.25.8
package io.grafeas.v1;
/**
*
*
* <pre>
* The status of the secret with a timestamp.
* </pre>
*
* Protobuf type {@code grafeas.v1.SecretStatus}
*/
public final class SecretStatus extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:grafeas.v1.SecretStatus)
SecretStatusOrBuilder {
private static final long serialVersionUID = 0L;
// Use SecretStatus.newBuilder() to construct.
private SecretStatus(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SecretStatus() {
status_ = 0;
message_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
// Invoked by the protobuf runtime to create fresh instances (hence the "unused"
// suppression); not intended for direct use.
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new SecretStatus();
}

/** Returns the message descriptor, resolved from the containing {@code Secret} file descriptor. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return io.grafeas.v1.Secret.internal_static_grafeas_v1_SecretStatus_descriptor;
}

// Wires the generated field accessor table to this message and its Builder class.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return io.grafeas.v1.Secret.internal_static_grafeas_v1_SecretStatus_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          io.grafeas.v1.SecretStatus.class, io.grafeas.v1.SecretStatus.Builder.class);
}
/**
 * The status of the secret.
 *
 * Protobuf enum {@code grafeas.v1.SecretStatus.Status}
 */
public enum Status implements com.google.protobuf.ProtocolMessageEnum {
  /**
   * Unspecified
   *
   * <code>STATUS_UNSPECIFIED = 0;</code>
   */
  STATUS_UNSPECIFIED(0),
  /**
   * The status of the secret is unknown.
   *
   * <code>UNKNOWN = 1;</code>
   */
  UNKNOWN(1),
  /**
   * The secret is valid.
   *
   * <code>VALID = 2;</code>
   */
  VALID(2),
  /**
   * The secret is invalid.
   *
   * <code>INVALID = 3;</code>
   */
  INVALID(3),
  // Sentinel for wire numbers this generated code does not know about; it has no
  // defined number or descriptor (see getNumber()/getValueDescriptor()).
  UNRECOGNIZED(-1),
  ;

  /** Wire value of {@link #STATUS_UNSPECIFIED}. <code>STATUS_UNSPECIFIED = 0;</code> */
  public static final int STATUS_UNSPECIFIED_VALUE = 0;

  /** Wire value of {@link #UNKNOWN}. <code>UNKNOWN = 1;</code> */
  public static final int UNKNOWN_VALUE = 1;

  /** Wire value of {@link #VALID}. <code>VALID = 2;</code> */
  public static final int VALID_VALUE = 2;

  /** Wire value of {@link #INVALID}. <code>INVALID = 3;</code> */
  public static final int INVALID_VALUE = 3;

  /**
   * Returns the numeric wire value of this enum entry.
   *
   * @throws java.lang.IllegalArgumentException if this is {@link #UNRECOGNIZED}, which
   *     has no defined wire number.
   */
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static Status valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or {@code null} if the
   *     number is not a known constant.
   */
  public static Status forNumber(int value) {
    switch (value) {
      case 0:
        return STATUS_UNSPECIFIED;
      case 1:
        return UNKNOWN;
      case 2:
        return VALID;
      case 3:
        return INVALID;
      default:
        return null;
    }
  }

  /** Lookup map used by the protobuf lite runtime to resolve wire numbers to constants. */
  public static com.google.protobuf.Internal.EnumLiteMap<Status> internalGetValueMap() {
    return internalValueMap;
  }

  private static final com.google.protobuf.Internal.EnumLiteMap<Status> internalValueMap =
      new com.google.protobuf.Internal.EnumLiteMap<Status>() {
        public Status findValueByNumber(int number) {
          return Status.forNumber(number);
        }
      };

  /**
   * Returns the protobuf descriptor of this enum value.
   *
   * @throws java.lang.IllegalStateException if this is {@link #UNRECOGNIZED}.
   */
  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  /** Returns the enum descriptor (the first enum type declared on {@code SecretStatus}). */
  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return io.grafeas.v1.SecretStatus.getDescriptor().getEnumTypes().get(0);
  }

  private static final Status[] VALUES = values();

  /**
   * Maps an enum value descriptor back to its constant; descriptor index -1 maps to
   * {@link #UNRECOGNIZED}.
   *
   * @throws java.lang.IllegalArgumentException if the descriptor belongs to another enum type.
   */
  public static Status valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  private final int value;

  private Status(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:grafeas.v1.SecretStatus.Status)
}
// Presence bits for explicitly-tracked fields (bit 0 = updateTime, see hasUpdateTime()).
private int bitField0_;

public static final int STATUS_FIELD_NUMBER = 1;
// Stored as the raw wire number so values unknown to this runtime round-trip intact
// (getStatus() surfaces them as UNRECOGNIZED).
private int status_ = 0;

/**
 * The status of the secret.
 *
 * <code>.grafeas.v1.SecretStatus.Status status = 1 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The enum numeric value on the wire for status.
 */
@java.lang.Override
public int getStatusValue() {
  return status_;
}
/**
 * The status of the secret.
 *
 * <code>.grafeas.v1.SecretStatus.Status status = 1 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The status, or {@link io.grafeas.v1.SecretStatus.Status#UNRECOGNIZED} if the stored
 *     wire number matches no known constant.
 */
@java.lang.Override
public io.grafeas.v1.SecretStatus.Status getStatus() {
  io.grafeas.v1.SecretStatus.Status result = io.grafeas.v1.SecretStatus.Status.forNumber(status_);
  return result == null ? io.grafeas.v1.SecretStatus.Status.UNRECOGNIZED : result;
}
public static final int UPDATE_TIME_FIELD_NUMBER = 2;
// Null when unset; presence is tracked separately in bit 0 of bitField0_.
private com.google.protobuf.Timestamp updateTime_;

/**
 * The time the secret status was last updated.
 *
 * <code>.google.protobuf.Timestamp update_time = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return Whether the updateTime field is set.
 */
@java.lang.Override
public boolean hasUpdateTime() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 * The time the secret status was last updated.
 *
 * <code>.google.protobuf.Timestamp update_time = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The updateTime, or the default {@code Timestamp} instance when unset (check
 *     {@link #hasUpdateTime()} to distinguish).
 */
@java.lang.Override
public com.google.protobuf.Timestamp getUpdateTime() {
  return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_;
}

/**
 * The time the secret status was last updated.
 *
 * <code>.google.protobuf.Timestamp update_time = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
  return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_;
}
public static final int MESSAGE_FIELD_NUMBER = 3;

@SuppressWarnings("serial")
// Holds either a String or a UTF-8 ByteString; decoded lazily and cached on first
// String access (volatile so the cached value publishes safely across threads).
private volatile java.lang.Object message_ = "";

/**
 * Optional message about the status code.
 *
 * <code>string message = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The message.
 */
@java.lang.Override
public java.lang.String getMessage() {
  java.lang.Object ref = message_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First String access: decode the ByteString once and cache the result.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    message_ = s;
    return s;
  }
}
/**
 * Optional message about the status code.
 *
 * <code>string message = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for message.
 */
@java.lang.Override
public com.google.protobuf.ByteString getMessageBytes() {
  java.lang.Object ref = message_;
  if (ref instanceof java.lang.String) {
    // Mirror of getMessage(): encode the cached String once and keep the ByteString.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    message_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No field checks here, so the message is always considered initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set fields in field-number order; default values (unspecified enum, unset
// message, empty string) are skipped per proto3 semantics, then unknown fields follow.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (status_ != io.grafeas.v1.SecretStatus.Status.STATUS_UNSPECIFIED.getNumber()) {
    output.writeEnum(1, status_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(2, getUpdateTime());
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, message_);
  }
  getUnknownFields().writeTo(output);
}
// Computes the serialized byte size, memoized in memoizedSize; must mirror the exact
// field/skip logic of writeTo(CodedOutputStream) above.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (status_ != io.grafeas.v1.SecretStatus.Status.STATUS_UNSPECIFIED.getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, status_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateTime());
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, message_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-by-field equality: status wire number, updateTime (presence then value),
// message, and finally the unknown field set.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof io.grafeas.v1.SecretStatus)) {
    return super.equals(obj);
  }
  io.grafeas.v1.SecretStatus other = (io.grafeas.v1.SecretStatus) obj;
  if (status_ != other.status_) return false;
  if (hasUpdateTime() != other.hasUpdateTime()) return false;
  if (hasUpdateTime()) {
    if (!getUpdateTime().equals(other.getUpdateTime())) return false;
  }
  if (!getMessage().equals(other.getMessage())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Hash over the same fields compared in equals(), memoized in memoizedHashCode.
// Seeds with the message descriptor and mixes each field under its field number.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + STATUS_FIELD_NUMBER;
  hash = (53 * hash) + status_;
  if (hasUpdateTime()) {
    hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER;
    hash = (53 * hash) + getUpdateTime().hashCode();
  }
  hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
  hash = (53 * hash) + getMessage().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static io.grafeas.v1.SecretStatus parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.grafeas.v1.SecretStatus parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.grafeas.v1.SecretStatus parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.grafeas.v1.SecretStatus parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.grafeas.v1.SecretStatus parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.grafeas.v1.SecretStatus parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.grafeas.v1.SecretStatus parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static io.grafeas.v1.SecretStatus parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static io.grafeas.v1.SecretStatus parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static io.grafeas.v1.SecretStatus parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static io.grafeas.v1.SecretStatus parseFrom(com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static io.grafeas.v1.SecretStatus parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(io.grafeas.v1.SecretStatus prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The status of the secret with a timestamp.
* </pre>
*
* Protobuf type {@code grafeas.v1.SecretStatus}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:grafeas.v1.SecretStatus)
io.grafeas.v1.SecretStatusOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return io.grafeas.v1.Secret.internal_static_grafeas_v1_SecretStatus_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return io.grafeas.v1.Secret.internal_static_grafeas_v1_SecretStatus_fieldAccessorTable
.ensureFieldAccessorsInitialized(
io.grafeas.v1.SecretStatus.class, io.grafeas.v1.SecretStatus.Builder.class);
}
// Construct using io.grafeas.v1.SecretStatus.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateTimeFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
status_ = 0;
updateTime_ = null;
if (updateTimeBuilder_ != null) {
updateTimeBuilder_.dispose();
updateTimeBuilder_ = null;
}
message_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return io.grafeas.v1.Secret.internal_static_grafeas_v1_SecretStatus_descriptor;
}
@java.lang.Override
public io.grafeas.v1.SecretStatus getDefaultInstanceForType() {
return io.grafeas.v1.SecretStatus.getDefaultInstance();
}
@java.lang.Override
public io.grafeas.v1.SecretStatus build() {
io.grafeas.v1.SecretStatus result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public io.grafeas.v1.SecretStatus buildPartial() {
io.grafeas.v1.SecretStatus result = new io.grafeas.v1.SecretStatus(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(io.grafeas.v1.SecretStatus result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.status_ = status_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateTime_ = updateTimeBuilder_ == null ? updateTime_ : updateTimeBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.message_ = message_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof io.grafeas.v1.SecretStatus) {
return mergeFrom((io.grafeas.v1.SecretStatus) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(io.grafeas.v1.SecretStatus other) {
if (other == io.grafeas.v1.SecretStatus.getDefaultInstance()) return this;
if (other.status_ != 0) {
setStatusValue(other.getStatusValue());
}
if (other.hasUpdateTime()) {
mergeUpdateTime(other.getUpdateTime());
}
if (!other.getMessage().isEmpty()) {
message_ = other.message_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
status_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18:
{
input.readMessage(getUpdateTimeFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
message_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int status_ = 0;
/**
*
*
* <pre>
* The status of the secret.
* </pre>
*
* <code>.grafeas.v1.SecretStatus.Status status = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The enum numeric value on the wire for status.
*/
@java.lang.Override
public int getStatusValue() {
return status_;
}
/**
*
*
* <pre>
* The status of the secret.
* </pre>
*
* <code>.grafeas.v1.SecretStatus.Status status = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @param value The enum numeric value on the wire for status to set.
* @return This builder for chaining.
*/
public Builder setStatusValue(int value) {
status_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The status of the secret.
* </pre>
*
* <code>.grafeas.v1.SecretStatus.Status status = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The status.
*/
@java.lang.Override
public io.grafeas.v1.SecretStatus.Status getStatus() {
io.grafeas.v1.SecretStatus.Status result =
io.grafeas.v1.SecretStatus.Status.forNumber(status_);
return result == null ? io.grafeas.v1.SecretStatus.Status.UNRECOGNIZED : result;
}
/**
*
*
* <pre>
* The status of the secret.
* </pre>
*
* <code>.grafeas.v1.SecretStatus.Status status = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @param value The status to set.
* @return This builder for chaining.
*/
public Builder setStatus(io.grafeas.v1.SecretStatus.Status value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
status_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* The status of the secret.
* </pre>
*
* <code>.grafeas.v1.SecretStatus.Status status = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearStatus() {
bitField0_ = (bitField0_ & ~0x00000001);
status_ = 0;
onChanged();
return this;
}
private com.google.protobuf.Timestamp updateTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
updateTimeBuilder_;
/**
*
*
* <pre>
* The time the secret status was last updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateTime field is set.
*/
public boolean hasUpdateTime() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The time the secret status was last updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateTime.
*/
public com.google.protobuf.Timestamp getUpdateTime() {
if (updateTimeBuilder_ == null) {
return updateTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: updateTime_;
} else {
return updateTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The time the secret status was last updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateTime(com.google.protobuf.Timestamp value) {
if (updateTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateTime_ = value;
} else {
updateTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The time the secret status was last updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (updateTimeBuilder_ == null) {
updateTime_ = builderForValue.build();
} else {
updateTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The time the secret status was last updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) {
if (updateTimeBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateTime_ != null
&& updateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
getUpdateTimeBuilder().mergeFrom(value);
} else {
updateTime_ = value;
}
} else {
updateTimeBuilder_.mergeFrom(value);
}
if (updateTime_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The time the secret status was last updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearUpdateTime() {
bitField0_ = (bitField0_ & ~0x00000002);
updateTime_ = null;
if (updateTimeBuilder_ != null) {
updateTimeBuilder_.dispose();
updateTimeBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The time the secret status was last updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateTimeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The time the secret status was last updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
if (updateTimeBuilder_ != null) {
return updateTimeBuilder_.getMessageOrBuilder();
} else {
return updateTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: updateTime_;
}
}
/**
*
*
* <pre>
* The time the secret status was last updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getUpdateTimeFieldBuilder() {
if (updateTimeBuilder_ == null) {
updateTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getUpdateTime(), getParentForChildren(), isClean());
updateTime_ = null;
}
return updateTimeBuilder_;
}
private java.lang.Object message_ = "";
/**
*
*
* <pre>
* Optional message about the status code.
* </pre>
*
* <code>string message = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The message.
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
message_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional message about the status code.
* </pre>
*
* <code>string message = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for message.
*/
public com.google.protobuf.ByteString getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
message_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional message about the status code.
* </pre>
*
* <code>string message = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The message to set.
* @return This builder for chaining.
*/
public Builder setMessage(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
message_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional message about the status code.
* </pre>
*
* <code>string message = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearMessage() {
message_ = getDefaultInstance().getMessage();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional message about the status code.
* </pre>
*
* <code>string message = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for message to set.
* @return This builder for chaining.
*/
public Builder setMessageBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
message_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:grafeas.v1.SecretStatus)
}
// @@protoc_insertion_point(class_scope:grafeas.v1.SecretStatus)
private static final io.grafeas.v1.SecretStatus DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new io.grafeas.v1.SecretStatus();
}
public static io.grafeas.v1.SecretStatus getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SecretStatus> PARSER =
new com.google.protobuf.AbstractParser<SecretStatus>() {
@java.lang.Override
public SecretStatus parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SecretStatus> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SecretStatus> getParserForType() {
return PARSER;
}
@java.lang.Override
public io.grafeas.v1.SecretStatus getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,353 | java-aiplatform/grpc-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FeatureOnlineStoreServiceGrpc.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.aiplatform.v1beta1;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* A service for fetching feature values from the online store.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/aiplatform/v1beta1/feature_online_store_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class FeatureOnlineStoreServiceGrpc {
private FeatureOnlineStoreServiceGrpc() {}
public static final java.lang.String SERVICE_NAME =
"google.cloud.aiplatform.v1beta1.FeatureOnlineStoreService";
// Static method descriptors that strictly reflect the proto.
// Built lazily; volatile so a descriptor published by one thread is visible to all.
private static volatile io.grpc.MethodDescriptor<
        com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest,
        com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse>
    getFetchFeatureValuesMethod;

@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "FetchFeatureValues",
    requestType = com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest.class,
    responseType = com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
        com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest,
        com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse>
    getFetchFeatureValuesMethod() {
  io.grpc.MethodDescriptor<
          com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest,
          com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse>
      getFetchFeatureValuesMethod;
  // Double-checked locking on the class monitor: build the unary method descriptor
  // (with protobuf marshallers and schema supplier) at most once.
  if ((getFetchFeatureValuesMethod = FeatureOnlineStoreServiceGrpc.getFetchFeatureValuesMethod)
      == null) {
    synchronized (FeatureOnlineStoreServiceGrpc.class) {
      if ((getFetchFeatureValuesMethod =
              FeatureOnlineStoreServiceGrpc.getFetchFeatureValuesMethod)
          == null) {
        FeatureOnlineStoreServiceGrpc.getFetchFeatureValuesMethod =
            getFetchFeatureValuesMethod =
                io.grpc.MethodDescriptor
                    .<com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest,
                        com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse>
                        newBuilder()
                    .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                    .setFullMethodName(generateFullMethodName(SERVICE_NAME, "FetchFeatureValues"))
                    .setSampledToLocalTracing(true)
                    .setRequestMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest
                                .getDefaultInstance()))
                    .setResponseMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse
                                .getDefaultInstance()))
                    .setSchemaDescriptor(
                        new FeatureOnlineStoreServiceMethodDescriptorSupplier(
                            "FetchFeatureValues"))
                    .build();
      }
    }
  }
  return getFetchFeatureValuesMethod;
}
  // Cached descriptor for the StreamingFetchFeatureValues RPC; volatile for safe publication.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesRequest,
          com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesResponse>
      getStreamingFetchFeatureValuesMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "StreamingFetchFeatureValues",
      requestType = com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesRequest.class,
      responseType = com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING)
  // Lazily creates and caches the MethodDescriptor for the bidi-streaming
  // StreamingFetchFeatureValues RPC (double-checked locking on the class object).
  public static io.grpc.MethodDescriptor<
          com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesRequest,
          com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesResponse>
      getStreamingFetchFeatureValuesMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesRequest,
            com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesResponse>
        getStreamingFetchFeatureValuesMethod;
    // Fast path: return the cached descriptor without taking the lock.
    if ((getStreamingFetchFeatureValuesMethod =
            FeatureOnlineStoreServiceGrpc.getStreamingFetchFeatureValuesMethod)
        == null) {
      synchronized (FeatureOnlineStoreServiceGrpc.class) {
        // Re-check under the lock in case another thread initialized it first.
        if ((getStreamingFetchFeatureValuesMethod =
                FeatureOnlineStoreServiceGrpc.getStreamingFetchFeatureValuesMethod)
            == null) {
          FeatureOnlineStoreServiceGrpc.getStreamingFetchFeatureValuesMethod =
              getStreamingFetchFeatureValuesMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesRequest,
                          com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING)
                      .setFullMethodName(
                          generateFullMethodName(SERVICE_NAME, "StreamingFetchFeatureValues"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.aiplatform.v1beta1
                                  .StreamingFetchFeatureValuesResponse.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new FeatureOnlineStoreServiceMethodDescriptorSupplier(
                              "StreamingFetchFeatureValues"))
                      .build();
        }
      }
    }
    return getStreamingFetchFeatureValuesMethod;
  }
  // Cached descriptor for the SearchNearestEntities RPC; volatile for safe publication.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest,
          com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse>
      getSearchNearestEntitiesMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "SearchNearestEntities",
      requestType = com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest.class,
      responseType = com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  // Lazily creates and caches the MethodDescriptor for the unary SearchNearestEntities RPC
  // (double-checked locking on the class object).
  public static io.grpc.MethodDescriptor<
          com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest,
          com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse>
      getSearchNearestEntitiesMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest,
            com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse>
        getSearchNearestEntitiesMethod;
    // Fast path: return the cached descriptor without taking the lock.
    if ((getSearchNearestEntitiesMethod =
            FeatureOnlineStoreServiceGrpc.getSearchNearestEntitiesMethod)
        == null) {
      synchronized (FeatureOnlineStoreServiceGrpc.class) {
        // Re-check under the lock in case another thread initialized it first.
        if ((getSearchNearestEntitiesMethod =
                FeatureOnlineStoreServiceGrpc.getSearchNearestEntitiesMethod)
            == null) {
          FeatureOnlineStoreServiceGrpc.getSearchNearestEntitiesMethod =
              getSearchNearestEntitiesMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest,
                          com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(
                          generateFullMethodName(SERVICE_NAME, "SearchNearestEntities"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new FeatureOnlineStoreServiceMethodDescriptorSupplier(
                              "SearchNearestEntities"))
                      .build();
        }
      }
    }
    return getSearchNearestEntitiesMethod;
  }
  // Cached descriptor for the FeatureViewDirectWrite RPC; volatile for safe publication.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteRequest,
          com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteResponse>
      getFeatureViewDirectWriteMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "FeatureViewDirectWrite",
      requestType = com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteRequest.class,
      responseType = com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING)
  // Lazily creates and caches the MethodDescriptor for the bidi-streaming
  // FeatureViewDirectWrite RPC (double-checked locking on the class object).
  public static io.grpc.MethodDescriptor<
          com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteRequest,
          com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteResponse>
      getFeatureViewDirectWriteMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteRequest,
            com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteResponse>
        getFeatureViewDirectWriteMethod;
    // Fast path: return the cached descriptor without taking the lock.
    if ((getFeatureViewDirectWriteMethod =
            FeatureOnlineStoreServiceGrpc.getFeatureViewDirectWriteMethod)
        == null) {
      synchronized (FeatureOnlineStoreServiceGrpc.class) {
        // Re-check under the lock in case another thread initialized it first.
        if ((getFeatureViewDirectWriteMethod =
                FeatureOnlineStoreServiceGrpc.getFeatureViewDirectWriteMethod)
            == null) {
          FeatureOnlineStoreServiceGrpc.getFeatureViewDirectWriteMethod =
              getFeatureViewDirectWriteMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteRequest,
                          com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING)
                      .setFullMethodName(
                          generateFullMethodName(SERVICE_NAME, "FeatureViewDirectWrite"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new FeatureOnlineStoreServiceMethodDescriptorSupplier(
                              "FeatureViewDirectWrite"))
                      .build();
        }
      }
    }
    return getFeatureViewDirectWriteMethod;
  }
/** Creates a new async stub that supports all call types for the service */
public static FeatureOnlineStoreServiceStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<FeatureOnlineStoreServiceStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<FeatureOnlineStoreServiceStub>() {
@java.lang.Override
public FeatureOnlineStoreServiceStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new FeatureOnlineStoreServiceStub(channel, callOptions);
}
};
return FeatureOnlineStoreServiceStub.newStub(factory, channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service */
public static FeatureOnlineStoreServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<FeatureOnlineStoreServiceBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<FeatureOnlineStoreServiceBlockingV2Stub>() {
@java.lang.Override
public FeatureOnlineStoreServiceBlockingV2Stub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new FeatureOnlineStoreServiceBlockingV2Stub(channel, callOptions);
}
};
return FeatureOnlineStoreServiceBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static FeatureOnlineStoreServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<FeatureOnlineStoreServiceBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<FeatureOnlineStoreServiceBlockingStub>() {
@java.lang.Override
public FeatureOnlineStoreServiceBlockingStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new FeatureOnlineStoreServiceBlockingStub(channel, callOptions);
}
};
return FeatureOnlineStoreServiceBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
public static FeatureOnlineStoreServiceFutureStub newFutureStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<FeatureOnlineStoreServiceFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<FeatureOnlineStoreServiceFutureStub>() {
@java.lang.Override
public FeatureOnlineStoreServiceFutureStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new FeatureOnlineStoreServiceFutureStub(channel, callOptions);
}
};
return FeatureOnlineStoreServiceFutureStub.newStub(factory, channel);
}
  /**
   * Server-side contract for FeatureOnlineStoreService. Implement the methods you support;
   * unimplemented ones fall back to the UNIMPLEMENTED defaults below.
   *
   * <pre>
   * A service for fetching feature values from the online store.
   * </pre>
   */
  public interface AsyncService {
    /**
     *
     *
     * <pre>
     * Fetch feature values under a FeatureView.
     * </pre>
     */
    default void fetchFeatureValues(
        com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse>
            responseObserver) {
      // Default: report UNIMPLEMENTED until a server overrides this method.
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getFetchFeatureValuesMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Bidirectional streaming RPC to fetch feature values under a FeatureView.
     * Requests may not have a one-to-one mapping to responses and responses may
     * be returned out-of-order to reduce latency.
     * </pre>
     */
    default io.grpc.stub.StreamObserver<
            com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesRequest>
        streamingFetchFeatureValues(
            io.grpc.stub.StreamObserver<
                    com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesResponse>
                responseObserver) {
      // Default: report UNIMPLEMENTED until a server overrides this method.
      return io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall(
          getStreamingFetchFeatureValuesMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Search the nearest entities under a FeatureView.
     * Search only works for indexable feature view; if a feature view isn't
     * indexable, returns Invalid argument response.
     * </pre>
     */
    default void searchNearestEntities(
        com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest request,
        io.grpc.stub.StreamObserver<
                com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse>
            responseObserver) {
      // Default: report UNIMPLEMENTED until a server overrides this method.
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getSearchNearestEntitiesMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Bidirectional streaming RPC to directly write to feature values in a
     * feature view. Requests may not have a one-to-one mapping to responses and
     * responses may be returned out-of-order to reduce latency.
     * </pre>
     */
    default io.grpc.stub.StreamObserver<
            com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteRequest>
        featureViewDirectWrite(
            io.grpc.stub.StreamObserver<
                    com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteResponse>
                responseObserver) {
      // Default: report UNIMPLEMENTED until a server overrides this method.
      return io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall(
          getFeatureViewDirectWriteMethod(), responseObserver);
    }
  }
  /**
   * Base class for the server implementation of the service FeatureOnlineStoreService.
   *
   * <pre>
   * A service for fetching feature values from the online store.
   * </pre>
   */
  public abstract static class FeatureOnlineStoreServiceImplBase
      implements io.grpc.BindableService, AsyncService {
    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      // Bind all four RPCs of this service to this implementation's AsyncService methods.
      return FeatureOnlineStoreServiceGrpc.bindService(this);
    }
  }
  /**
   * A stub to allow clients to do asynchronous rpc calls to service FeatureOnlineStoreService.
   *
   * <pre>
   * A service for fetching feature values from the online store.
   * </pre>
   */
  public static final class FeatureOnlineStoreServiceStub
      extends io.grpc.stub.AbstractAsyncStub<FeatureOnlineStoreServiceStub> {
    // Private: instances are obtained via newStub(...) or derived via build(...).
    private FeatureOnlineStoreServiceStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    // Returns a new stub instance over the given channel and call options.
    @java.lang.Override
    protected FeatureOnlineStoreServiceStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new FeatureOnlineStoreServiceStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Fetch feature values under a FeatureView.
     * </pre>
     */
    public void fetchFeatureValues(
        com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse>
            responseObserver) {
      // Asynchronous unary call; the response is delivered to responseObserver.
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getFetchFeatureValuesMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Bidirectional streaming RPC to fetch feature values under a FeatureView.
     * Requests may not have a one-to-one mapping to responses and responses may
     * be returned out-of-order to reduce latency.
     * </pre>
     */
    public io.grpc.stub.StreamObserver<
            com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesRequest>
        streamingFetchFeatureValues(
            io.grpc.stub.StreamObserver<
                    com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesResponse>
                responseObserver) {
      // Returns the request observer; the caller writes requests to it and receives
      // responses on responseObserver.
      return io.grpc.stub.ClientCalls.asyncBidiStreamingCall(
          getChannel().newCall(getStreamingFetchFeatureValuesMethod(), getCallOptions()),
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Search the nearest entities under a FeatureView.
     * Search only works for indexable feature view; if a feature view isn't
     * indexable, returns Invalid argument response.
     * </pre>
     */
    public void searchNearestEntities(
        com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest request,
        io.grpc.stub.StreamObserver<
                com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse>
            responseObserver) {
      // Asynchronous unary call; the response is delivered to responseObserver.
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getSearchNearestEntitiesMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Bidirectional streaming RPC to directly write to feature values in a
     * feature view. Requests may not have a one-to-one mapping to responses and
     * responses may be returned out-of-order to reduce latency.
     * </pre>
     */
    public io.grpc.stub.StreamObserver<
            com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteRequest>
        featureViewDirectWrite(
            io.grpc.stub.StreamObserver<
                    com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteResponse>
                responseObserver) {
      // Returns the request observer; the caller writes requests to it and receives
      // responses on responseObserver.
      return io.grpc.stub.ClientCalls.asyncBidiStreamingCall(
          getChannel().newCall(getFeatureViewDirectWriteMethod(), getCallOptions()),
          responseObserver);
    }
  }
  /**
   * A stub to allow clients to do synchronous rpc calls to service FeatureOnlineStoreService.
   *
   * <pre>
   * A service for fetching feature values from the online store.
   * </pre>
   */
  public static final class FeatureOnlineStoreServiceBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<FeatureOnlineStoreServiceBlockingV2Stub> {
    // Private: instances are obtained via newBlockingV2Stub(...) or derived via build(...).
    private FeatureOnlineStoreServiceBlockingV2Stub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    // Returns a new stub instance over the given channel and call options.
    @java.lang.Override
    protected FeatureOnlineStoreServiceBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new FeatureOnlineStoreServiceBlockingV2Stub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Fetch feature values under a FeatureView.
     * </pre>
     */
    public com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse fetchFeatureValues(
        com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest request) {
      // Blocks the calling thread until the response (or an error) arrives.
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getFetchFeatureValuesMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Bidirectional streaming RPC to fetch feature values under a FeatureView.
     * Requests may not have a one-to-one mapping to responses and responses may
     * be returned out-of-order to reduce latency.
     * </pre>
     */
    @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/10918")
    public io.grpc.stub.BlockingClientCall<
            com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesRequest,
            com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesResponse>
        streamingFetchFeatureValues() {
      // Experimental blocking bidi-streaming API (see the linked grpc-java issue).
      return io.grpc.stub.ClientCalls.blockingBidiStreamingCall(
          getChannel(), getStreamingFetchFeatureValuesMethod(), getCallOptions());
    }
    /**
     *
     *
     * <pre>
     * Search the nearest entities under a FeatureView.
     * Search only works for indexable feature view; if a feature view isn't
     * indexable, returns Invalid argument response.
     * </pre>
     */
    public com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse searchNearestEntities(
        com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest request) {
      // Blocks the calling thread until the response (or an error) arrives.
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getSearchNearestEntitiesMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Bidirectional streaming RPC to directly write to feature values in a
     * feature view. Requests may not have a one-to-one mapping to responses and
     * responses may be returned out-of-order to reduce latency.
     * </pre>
     */
    @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/10918")
    public io.grpc.stub.BlockingClientCall<
            com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteRequest,
            com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteResponse>
        featureViewDirectWrite() {
      // Experimental blocking bidi-streaming API (see the linked grpc-java issue).
      return io.grpc.stub.ClientCalls.blockingBidiStreamingCall(
          getChannel(), getFeatureViewDirectWriteMethod(), getCallOptions());
    }
  }
  /**
   * A stub to allow clients to do limited synchronous rpc calls to service
   * FeatureOnlineStoreService. Only the unary RPCs are exposed here; use the V2 blocking stub
   * or the async stub for the streaming RPCs.
   *
   * <pre>
   * A service for fetching feature values from the online store.
   * </pre>
   */
  public static final class FeatureOnlineStoreServiceBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<FeatureOnlineStoreServiceBlockingStub> {
    // Private: instances are obtained via newBlockingStub(...) or derived via build(...).
    private FeatureOnlineStoreServiceBlockingStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    // Returns a new stub instance over the given channel and call options.
    @java.lang.Override
    protected FeatureOnlineStoreServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new FeatureOnlineStoreServiceBlockingStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Fetch feature values under a FeatureView.
     * </pre>
     */
    public com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse fetchFeatureValues(
        com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest request) {
      // Blocks the calling thread until the response (or an error) arrives.
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getFetchFeatureValuesMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Search the nearest entities under a FeatureView.
     * Search only works for indexable feature view; if a feature view isn't
     * indexable, returns Invalid argument response.
     * </pre>
     */
    public com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse searchNearestEntities(
        com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest request) {
      // Blocks the calling thread until the response (or an error) arrives.
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getSearchNearestEntitiesMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service
   * FeatureOnlineStoreService. Only the unary RPCs are exposed here.
   *
   * <pre>
   * A service for fetching feature values from the online store.
   * </pre>
   */
  public static final class FeatureOnlineStoreServiceFutureStub
      extends io.grpc.stub.AbstractFutureStub<FeatureOnlineStoreServiceFutureStub> {
    // Private: instances are obtained via newFutureStub(...) or derived via build(...).
    private FeatureOnlineStoreServiceFutureStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    // Returns a new stub instance over the given channel and call options.
    @java.lang.Override
    protected FeatureOnlineStoreServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new FeatureOnlineStoreServiceFutureStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Fetch feature values under a FeatureView.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse>
        fetchFeatureValues(com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest request) {
      // Non-blocking: returns a future that completes with the response.
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getFetchFeatureValuesMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Search the nearest entities under a FeatureView.
     * Search only works for indexable feature view; if a feature view isn't
     * indexable, returns Invalid argument response.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse>
        searchNearestEntities(
            com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest request) {
      // Non-blocking: returns a future that completes with the response.
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getSearchNearestEntitiesMethod(), getCallOptions()), request);
    }
  }
  // Dispatch ordinals used by MethodHandlers.invoke to route a call to the right RPC handler.
  private static final int METHODID_FETCH_FEATURE_VALUES = 0;
  private static final int METHODID_SEARCH_NEAREST_ENTITIES = 1;
  private static final int METHODID_STREAMING_FETCH_FEATURE_VALUES = 2;
  private static final int METHODID_FEATURE_VIEW_DIRECT_WRITE = 3;
  // Adapter that routes an incoming server call, identified by its METHODID_* ordinal, to the
  // corresponding AsyncService method. The unchecked casts are safe because bindService pairs
  // each handler instance with the matching request/response types.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;
    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }
    // Dispatch for unary and server-streaming methods.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_FETCH_FEATURE_VALUES:
          serviceImpl.fetchFeatureValues(
              (com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse>)
                  responseObserver);
          break;
        case METHODID_SEARCH_NEAREST_ENTITIES:
          serviceImpl.searchNearestEntities(
              (com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse>)
                  responseObserver);
          break;
        default:
          // Streaming method IDs must never reach this overload.
          throw new AssertionError();
      }
    }
    // Dispatch for client-streaming and bidi-streaming methods.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_STREAMING_FETCH_FEATURE_VALUES:
          return (io.grpc.stub.StreamObserver<Req>)
              serviceImpl.streamingFetchFeatureValues(
                  (io.grpc.stub.StreamObserver<
                          com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesResponse>)
                      responseObserver);
        case METHODID_FEATURE_VIEW_DIRECT_WRITE:
          return (io.grpc.stub.StreamObserver<Req>)
              serviceImpl.featureViewDirectWrite(
                  (io.grpc.stub.StreamObserver<
                          com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteResponse>)
                      responseObserver);
        default:
          // Unary method IDs must never reach this overload.
          throw new AssertionError();
      }
    }
  }
  // Builds the server-side service definition, pairing each MethodDescriptor with a
  // MethodHandlers instance carrying the matching METHODID_* ordinal and call type.
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getFetchFeatureValuesMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest,
                    com.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse>(
                    service, METHODID_FETCH_FEATURE_VALUES)))
        .addMethod(
            getStreamingFetchFeatureValuesMethod(),
            io.grpc.stub.ServerCalls.asyncBidiStreamingCall(
                new MethodHandlers<
                    com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesRequest,
                    com.google.cloud.aiplatform.v1beta1.StreamingFetchFeatureValuesResponse>(
                    service, METHODID_STREAMING_FETCH_FEATURE_VALUES)))
        .addMethod(
            getSearchNearestEntitiesMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesRequest,
                    com.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse>(
                    service, METHODID_SEARCH_NEAREST_ENTITIES)))
        .addMethod(
            getFeatureViewDirectWriteMethod(),
            io.grpc.stub.ServerCalls.asyncBidiStreamingCall(
                new MethodHandlers<
                    com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteRequest,
                    com.google.cloud.aiplatform.v1beta1.FeatureViewDirectWriteResponse>(
                    service, METHODID_FEATURE_VIEW_DIRECT_WRITE)))
        .build();
  }
  // Common base exposing the proto file and service descriptors for reflection/schema support.
  private abstract static class FeatureOnlineStoreServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    FeatureOnlineStoreServiceBaseDescriptorSupplier() {}
    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.FeatureOnlineStoreServiceProto.getDescriptor();
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      // Looks the service up by name within the generated proto file descriptor.
      return getFileDescriptor().findServiceByName("FeatureOnlineStoreService");
    }
  }
  // File-level schema descriptor supplier (no method name attached); used by the service builder.
  private static final class FeatureOnlineStoreServiceFileDescriptorSupplier
      extends FeatureOnlineStoreServiceBaseDescriptorSupplier {
    FeatureOnlineStoreServiceFileDescriptorSupplier() {}
  }
  // Per-method schema descriptor supplier; resolves the proto MethodDescriptor by name.
  private static final class FeatureOnlineStoreServiceMethodDescriptorSupplier
      extends FeatureOnlineStoreServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;
    FeatureOnlineStoreServiceMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  // Cached ServiceDescriptor for the whole service; volatile for safe publication.
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
  // Lazily builds and caches the ServiceDescriptor (double-checked locking on the class object),
  // registering all four method descriptors and the file-level schema supplier.
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (FeatureOnlineStoreServiceGrpc.class) {
        // Re-check under the lock in case another thread initialized it first.
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new FeatureOnlineStoreServiceFileDescriptorSupplier())
                      .addMethod(getFetchFeatureValuesMethod())
                      .addMethod(getStreamingFetchFeatureValuesMethod())
                      .addMethod(getSearchNearestEntitiesMethod())
                      .addMethod(getFeatureViewDirectWriteMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
// File: clients/google-api-services-realtimebidding/v1alpha/2.0.0/com/google/api/services/realtimebidding/v1alpha/RealTimeBidding.java
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.realtimebidding.v1alpha;
/**
* Service definition for RealTimeBidding (v1alpha).
*
* <p>
* Allows external bidders to manage their RTB integration with Google. This includes managing bidder endpoints, QPS quotas, configuring what ad inventory to receive via pretargeting, submitting creatives for verification, and accessing creative metadata such as approval status.
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://developers.google.com/authorized-buyers/apis/realtimebidding/reference/rest/" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link RealTimeBiddingRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class RealTimeBidding extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
  // Note: Leave this static initializer at the top of the file.
  static {
    // Fail fast at class-load time if the runtime google-api-client is older than 1.31.1,
    // which this generated library requires (accepts 1.31.1+, 1.32+, or any 2.x).
    com.google.api.client.util.Preconditions.checkState(
        (com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
        (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 32 ||
        (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION == 31 &&
        com.google.api.client.googleapis.GoogleUtils.BUGFIX_VERSION >= 1))) ||
        com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION >= 2,
        "You are currently running with version %s of google-api-client. " +
        "You need at least version 1.31.1 of google-api-client to run version " +
        "2.0.0 of the Real-time Bidding API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
  }
/**
* The default encoded root URL of the service. This is determined when the library is generated
* and normally should not be changed.
*
* @since 1.7
*/
public static final String DEFAULT_ROOT_URL = "https://realtimebidding.googleapis.com/";
/**
* The default encoded mTLS root URL of the service. This is determined when the library is generated
* and normally should not be changed.
*
* @since 1.31
*/
public static final String DEFAULT_MTLS_ROOT_URL = "https://realtimebidding.mtls.googleapis.com/";
/**
* The default encoded service path of the service. This is determined when the library is
* generated and normally should not be changed.
*
* @since 1.7
*/
public static final String DEFAULT_SERVICE_PATH = "";
/**
* The default encoded batch path of the service. This is determined when the library is
* generated and normally should not be changed.
*
* @since 1.23
*/
public static final String DEFAULT_BATCH_PATH = "batch";
/**
* The default encoded base URL of the service. This is determined when the library is generated
* and normally should not be changed.
*/
public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
/**
* Constructor.
*
* <p>
* Use {@link Builder} if you need to specify any of the optional parameters.
* </p>
*
* @param transport HTTP transport, which should normally be:
* <ul>
* <li>Google App Engine:
* {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
* <li>Android: {@code newCompatibleTransport} from
* {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
* <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
* </li>
* </ul>
* @param jsonFactory JSON factory, which may be:
* <ul>
* <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
* <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
* <li>Android Honeycomb or higher:
* {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
* </ul>
* @param httpRequestInitializer HTTP request initializer or {@code null} for none
* @since 1.7
*/
  public RealTimeBidding(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
      com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
    // Delegates to the Builder-based constructor with default URLs and paths.
    this(new Builder(transport, jsonFactory, httpRequestInitializer));
  }
  /**
   * Package-private constructor used by {@link Builder#build()}.
   *
   * @param builder builder
   */
  RealTimeBidding(Builder builder) {
    super(builder);
  }
  // Hook applied to every request created by this client before it is executed.
  @Override
  protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
    super.initialize(httpClientRequest);
  }
  /**
   * An accessor for creating requests from the Bidders collection.
   *
   * <p>The typical use is:</p>
   * <pre>
   *   {@code RealTimeBidding realtimebidding = new RealTimeBidding(...);}
   *   {@code RealTimeBidding.Bidders.List request = realtimebidding.bidders().list(parameters ...)}
   * </pre>
   *
   * @return the resource collection
   */
  public Bidders bidders() {
    // Collection accessors are cheap; a fresh instance is returned on every call.
    return new Bidders();
  }
/**
* The "bidders" collection of methods.
*/
public class Bidders {
    /**
     * An accessor for creating requests from the BiddingFunctions collection.
     *
     * <p>The typical use is:</p>
     * <pre>
     *   {@code RealTimeBidding realtimebidding = new RealTimeBidding(...);}
     *   {@code RealTimeBidding.BiddingFunctions.List request = realtimebidding.biddingFunctions().list(parameters ...)}
     * </pre>
     *
     * @return the resource collection
     */
    public BiddingFunctions biddingFunctions() {
      // Collection accessors are cheap; a fresh instance is returned on every call.
      return new BiddingFunctions();
    }
/**
* The "biddingFunctions" collection of methods.
*/
public class BiddingFunctions {
      /**
       * Activates an existing bidding function. An activated function is available for invocation for the
       * server-side TURTLEDOVE simulations.
       *
       * Create a request for the method "biddingFunctions.activate".
       *
       * This request holds the parameters needed by the realtimebidding server. After setting any
       * optional parameters, call the {@link Activate#execute()} method to invoke the remote operation.
       *
       * @param name Required. The name of the bidding function to activate. Format:
       *        `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
       * @param content the {@link com.google.api.services.realtimebidding.v1alpha.model.ActivateBiddingFunctionRequest}
       * @return the request
       */
      public Activate activate(java.lang.String name, com.google.api.services.realtimebidding.v1alpha.model.ActivateBiddingFunctionRequest content) throws java.io.IOException {
        Activate result = new Activate(name, content);
        initialize(result);  // apply the client's request initializer before handing out the request
        return result;
      }
      public class Activate extends RealTimeBiddingRequest<com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction> {
        private static final String REST_PATH = "v1alpha/{+name}:activate";
        // Resource-name pattern enforced client-side for `name` unless pattern
        // checks are suppressed on the client (see getSuppressPatternChecks()).
        private final java.util.regex.Pattern NAME_PATTERN =
            java.util.regex.Pattern.compile("^bidders/[^/]+/biddingFunctions/[^/]+$");
        /**
         * Activates an existing bidding function. An activated function is available for invocation for
         * the server-side TURTLEDOVE simulations.
         *
         * Create a request for the method "biddingFunctions.activate".
         *
         * This request holds the parameters needed by the realtimebidding server. After setting any
         * optional parameters, call the {@link Activate#execute()} method to invoke the remote operation.
         * <p> {@link
         * Activate#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
         * must be called to initialize this instance immediately after invoking the constructor. </p>
         *
         * @param name Required. The name of the bidding function to activate. Format:
         *        `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
         * @param content the {@link com.google.api.services.realtimebidding.v1alpha.model.ActivateBiddingFunctionRequest}
         * @since 1.13
         */
        protected Activate(java.lang.String name, com.google.api.services.realtimebidding.v1alpha.model.ActivateBiddingFunctionRequest content) {
          super(RealTimeBidding.this, "POST", REST_PATH, content, com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction.class);
          this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
                "Parameter name must conform to the pattern " +
                "^bidders/[^/]+/biddingFunctions/[^/]+$");
          }
        }
        // Covariant fluent-setter overrides: each re-declares the inherited setter so
        // chained calls keep the concrete Activate type instead of the base request type.
        @Override
        public Activate set$Xgafv(java.lang.String $Xgafv) {
          return (Activate) super.set$Xgafv($Xgafv);
        }
        @Override
        public Activate setAccessToken(java.lang.String accessToken) {
          return (Activate) super.setAccessToken(accessToken);
        }
        @Override
        public Activate setAlt(java.lang.String alt) {
          return (Activate) super.setAlt(alt);
        }
        @Override
        public Activate setCallback(java.lang.String callback) {
          return (Activate) super.setCallback(callback);
        }
        @Override
        public Activate setFields(java.lang.String fields) {
          return (Activate) super.setFields(fields);
        }
        @Override
        public Activate setKey(java.lang.String key) {
          return (Activate) super.setKey(key);
        }
        @Override
        public Activate setOauthToken(java.lang.String oauthToken) {
          return (Activate) super.setOauthToken(oauthToken);
        }
        @Override
        public Activate setPrettyPrint(java.lang.Boolean prettyPrint) {
          return (Activate) super.setPrettyPrint(prettyPrint);
        }
        @Override
        public Activate setQuotaUser(java.lang.String quotaUser) {
          return (Activate) super.setQuotaUser(quotaUser);
        }
        @Override
        public Activate setUploadType(java.lang.String uploadType) {
          return (Activate) super.setUploadType(uploadType);
        }
        @Override
        public Activate setUploadProtocol(java.lang.String uploadProtocol) {
          return (Activate) super.setUploadProtocol(uploadProtocol);
        }
        /**
         * Required. The name of the bidding function to activate. Format:
         * `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
         */
        // NOTE(review): the documented format uses "biddingFunction" (singular) while the
        // enforced NAME_PATTERN requires "biddingFunctions" — this text comes from the
        // discovery document; verify upstream rather than editing this generated file.
        @com.google.api.client.util.Key
        private java.lang.String name;
        /** Required. The name of the bidding function to activate. Format:
       `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
         */
        public java.lang.String getName() {
          return name;
        }
        /**
         * Required. The name of the bidding function to activate. Format:
         * `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
         */
        public Activate setName(java.lang.String name) {
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
                "Parameter name must conform to the pattern " +
                "^bidders/[^/]+/biddingFunctions/[^/]+$");
          }
          this.name = name;
          return this;
        }
        @Override
        public Activate set(String parameterName, Object value) {
          return (Activate) super.set(parameterName, value);
        }
      }
      /**
       * Archives an existing bidding function. An archived function will not be available for function
       * invocation for the server-side TURTLEDOVE simulations unless it is activated.
       *
       * Create a request for the method "biddingFunctions.archive".
       *
       * This request holds the parameters needed by the realtimebidding server. After setting any
       * optional parameters, call the {@link Archive#execute()} method to invoke the remote operation.
       *
       * @param name Required. The name of the bidding function to archive. Format:
       *        `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
       * @param content the {@link com.google.api.services.realtimebidding.v1alpha.model.ArchiveBiddingFunctionRequest}
       * @return the request
       */
      public Archive archive(java.lang.String name, com.google.api.services.realtimebidding.v1alpha.model.ArchiveBiddingFunctionRequest content) throws java.io.IOException {
        Archive result = new Archive(name, content);
        initialize(result);  // apply the client's request initializer before handing out the request
        return result;
      }
      public class Archive extends RealTimeBiddingRequest<com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction> {
        private static final String REST_PATH = "v1alpha/{+name}:archive";
        // Resource-name pattern enforced client-side for `name` unless pattern
        // checks are suppressed on the client (see getSuppressPatternChecks()).
        private final java.util.regex.Pattern NAME_PATTERN =
            java.util.regex.Pattern.compile("^bidders/[^/]+/biddingFunctions/[^/]+$");
        /**
         * Archives an existing bidding function. An archived function will not be available for function
         * invocation for the server-side TURTLEDOVE simulations unless it is activated.
         *
         * Create a request for the method "biddingFunctions.archive".
         *
         * This request holds the parameters needed by the realtimebidding server. After setting any
         * optional parameters, call the {@link Archive#execute()} method to invoke the remote operation.
         * <p> {@link
         * Archive#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
         * be called to initialize this instance immediately after invoking the constructor. </p>
         *
         * @param name Required. The name of the bidding function to archive. Format:
         *        `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
         * @param content the {@link com.google.api.services.realtimebidding.v1alpha.model.ArchiveBiddingFunctionRequest}
         * @since 1.13
         */
        protected Archive(java.lang.String name, com.google.api.services.realtimebidding.v1alpha.model.ArchiveBiddingFunctionRequest content) {
          super(RealTimeBidding.this, "POST", REST_PATH, content, com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction.class);
          this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
                "Parameter name must conform to the pattern " +
                "^bidders/[^/]+/biddingFunctions/[^/]+$");
          }
        }
        // Covariant fluent-setter overrides: each re-declares the inherited setter so
        // chained calls keep the concrete Archive type instead of the base request type.
        @Override
        public Archive set$Xgafv(java.lang.String $Xgafv) {
          return (Archive) super.set$Xgafv($Xgafv);
        }
        @Override
        public Archive setAccessToken(java.lang.String accessToken) {
          return (Archive) super.setAccessToken(accessToken);
        }
        @Override
        public Archive setAlt(java.lang.String alt) {
          return (Archive) super.setAlt(alt);
        }
        @Override
        public Archive setCallback(java.lang.String callback) {
          return (Archive) super.setCallback(callback);
        }
        @Override
        public Archive setFields(java.lang.String fields) {
          return (Archive) super.setFields(fields);
        }
        @Override
        public Archive setKey(java.lang.String key) {
          return (Archive) super.setKey(key);
        }
        @Override
        public Archive setOauthToken(java.lang.String oauthToken) {
          return (Archive) super.setOauthToken(oauthToken);
        }
        @Override
        public Archive setPrettyPrint(java.lang.Boolean prettyPrint) {
          return (Archive) super.setPrettyPrint(prettyPrint);
        }
        @Override
        public Archive setQuotaUser(java.lang.String quotaUser) {
          return (Archive) super.setQuotaUser(quotaUser);
        }
        @Override
        public Archive setUploadType(java.lang.String uploadType) {
          return (Archive) super.setUploadType(uploadType);
        }
        @Override
        public Archive setUploadProtocol(java.lang.String uploadProtocol) {
          return (Archive) super.setUploadProtocol(uploadProtocol);
        }
        /**
         * Required. The name of the bidding function to archive. Format:
         * `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
         */
        // NOTE(review): the documented format uses "biddingFunction" (singular) while the
        // enforced NAME_PATTERN requires "biddingFunctions" — this text comes from the
        // discovery document; verify upstream rather than editing this generated file.
        @com.google.api.client.util.Key
        private java.lang.String name;
        /** Required. The name of the bidding function to archive. Format:
       `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
         */
        public java.lang.String getName() {
          return name;
        }
        /**
         * Required. The name of the bidding function to archive. Format:
         * `bidders/{bidder_account_id}/biddingFunction/{bidding_function_name}`
         */
        public Archive setName(java.lang.String name) {
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
                "Parameter name must conform to the pattern " +
                "^bidders/[^/]+/biddingFunctions/[^/]+$");
          }
          this.name = name;
          return this;
        }
        @Override
        public Archive set(String parameterName, Object value) {
          return (Archive) super.set(parameterName, value);
        }
      }
      /**
       * Creates a new bidding function.
       *
       * Create a request for the method "biddingFunctions.create".
       *
       * This request holds the parameters needed by the realtimebidding server. After setting any
       * optional parameters, call the {@link Create#execute()} method to invoke the remote operation.
       *
       * @param parent Required. The name of the bidder for which to create the bidding function. Format:
       *        `bidders/{bidderAccountId}`
       * @param content the {@link com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction}
       * @return the request
       */
      public Create create(java.lang.String parent, com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction content) throws java.io.IOException {
        Create result = new Create(parent, content);
        initialize(result);  // apply the client's request initializer before handing out the request
        return result;
      }
      public class Create extends RealTimeBiddingRequest<com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction> {
        private static final String REST_PATH = "v1alpha/{+parent}/biddingFunctions";
        // Resource-name pattern enforced client-side for `parent` unless pattern
        // checks are suppressed on the client (see getSuppressPatternChecks()).
        private final java.util.regex.Pattern PARENT_PATTERN =
            java.util.regex.Pattern.compile("^bidders/[^/]+$");
        /**
         * Creates a new bidding function.
         *
         * Create a request for the method "biddingFunctions.create".
         *
         * This request holds the parameters needed by the realtimebidding server. After setting any
         * optional parameters, call the {@link Create#execute()} method to invoke the remote operation.
         * <p> {@link
         * Create#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
         * be called to initialize this instance immediately after invoking the constructor. </p>
         *
         * @param parent Required. The name of the bidder for which to create the bidding function. Format:
         *        `bidders/{bidderAccountId}`
         * @param content the {@link com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction}
         * @since 1.13
         */
        protected Create(java.lang.String parent, com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction content) {
          super(RealTimeBidding.this, "POST", REST_PATH, content, com.google.api.services.realtimebidding.v1alpha.model.BiddingFunction.class);
          this.parent = com.google.api.client.util.Preconditions.checkNotNull(parent, "Required parameter parent must be specified.");
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(),
                "Parameter parent must conform to the pattern " +
                "^bidders/[^/]+$");
          }
        }
        // Covariant fluent-setter overrides: each re-declares the inherited setter so
        // chained calls keep the concrete Create type instead of the base request type.
        @Override
        public Create set$Xgafv(java.lang.String $Xgafv) {
          return (Create) super.set$Xgafv($Xgafv);
        }
        @Override
        public Create setAccessToken(java.lang.String accessToken) {
          return (Create) super.setAccessToken(accessToken);
        }
        @Override
        public Create setAlt(java.lang.String alt) {
          return (Create) super.setAlt(alt);
        }
        @Override
        public Create setCallback(java.lang.String callback) {
          return (Create) super.setCallback(callback);
        }
        @Override
        public Create setFields(java.lang.String fields) {
          return (Create) super.setFields(fields);
        }
        @Override
        public Create setKey(java.lang.String key) {
          return (Create) super.setKey(key);
        }
        @Override
        public Create setOauthToken(java.lang.String oauthToken) {
          return (Create) super.setOauthToken(oauthToken);
        }
        @Override
        public Create setPrettyPrint(java.lang.Boolean prettyPrint) {
          return (Create) super.setPrettyPrint(prettyPrint);
        }
        @Override
        public Create setQuotaUser(java.lang.String quotaUser) {
          return (Create) super.setQuotaUser(quotaUser);
        }
        @Override
        public Create setUploadType(java.lang.String uploadType) {
          return (Create) super.setUploadType(uploadType);
        }
        @Override
        public Create setUploadProtocol(java.lang.String uploadProtocol) {
          return (Create) super.setUploadProtocol(uploadProtocol);
        }
        /**
         * Required. The name of the bidder for which to create the bidding function. Format:
         * `bidders/{bidderAccountId}`
         */
        @com.google.api.client.util.Key
        private java.lang.String parent;
        /** Required. The name of the bidder for which to create the bidding function. Format:
       `bidders/{bidderAccountId}`
         */
        public java.lang.String getParent() {
          return parent;
        }
        /**
         * Required. The name of the bidder for which to create the bidding function. Format:
         * `bidders/{bidderAccountId}`
         */
        public Create setParent(java.lang.String parent) {
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(),
                "Parameter parent must conform to the pattern " +
                "^bidders/[^/]+$");
          }
          this.parent = parent;
          return this;
        }
        @Override
        public Create set(String parameterName, Object value) {
          return (Create) super.set(parameterName, value);
        }
      }
      /**
       * Lists the bidding functions that a bidder currently has registered.
       *
       * Create a request for the method "biddingFunctions.list".
       *
       * This request holds the parameters needed by the realtimebidding server. After setting any
       * optional parameters, call the {@link List#execute()} method to invoke the remote operation.
       *
       * @param parent Required. Name of the bidder whose bidding functions will be listed. Format:
       *        `bidders/{bidder_account_id}`
       * @return the request
       */
      public List list(java.lang.String parent) throws java.io.IOException {
        List result = new List(parent);
        initialize(result);  // apply the client's request initializer before handing out the request
        return result;
      }
      public class List extends RealTimeBiddingRequest<com.google.api.services.realtimebidding.v1alpha.model.ListBiddingFunctionsResponse> {
        private static final String REST_PATH = "v1alpha/{+parent}/biddingFunctions";
        // Resource-name pattern enforced client-side for `parent` unless pattern
        // checks are suppressed on the client (see getSuppressPatternChecks()).
        private final java.util.regex.Pattern PARENT_PATTERN =
            java.util.regex.Pattern.compile("^bidders/[^/]+$");
        /**
         * Lists the bidding functions that a bidder currently has registered.
         *
         * Create a request for the method "biddingFunctions.list".
         *
         * This request holds the parameters needed by the realtimebidding server. After setting any
         * optional parameters, call the {@link List#execute()} method to invoke the remote operation. <p>
         * {@link List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
         * must be called to initialize this instance immediately after invoking the constructor. </p>
         *
         * @param parent Required. Name of the bidder whose bidding functions will be listed. Format:
         *        `bidders/{bidder_account_id}`
         * @since 1.13
         */
        protected List(java.lang.String parent) {
          super(RealTimeBidding.this, "GET", REST_PATH, null, com.google.api.services.realtimebidding.v1alpha.model.ListBiddingFunctionsResponse.class);
          this.parent = com.google.api.client.util.Preconditions.checkNotNull(parent, "Required parameter parent must be specified.");
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(),
                "Parameter parent must conform to the pattern " +
                "^bidders/[^/]+$");
          }
        }
        // HEAD variants are exposed because this is a GET request (no request body).
        @Override
        public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
          return super.executeUsingHead();
        }
        @Override
        public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
          return super.buildHttpRequestUsingHead();
        }
        // Covariant fluent-setter overrides: each re-declares the inherited setter so
        // chained calls keep the concrete List type instead of the base request type.
        @Override
        public List set$Xgafv(java.lang.String $Xgafv) {
          return (List) super.set$Xgafv($Xgafv);
        }
        @Override
        public List setAccessToken(java.lang.String accessToken) {
          return (List) super.setAccessToken(accessToken);
        }
        @Override
        public List setAlt(java.lang.String alt) {
          return (List) super.setAlt(alt);
        }
        @Override
        public List setCallback(java.lang.String callback) {
          return (List) super.setCallback(callback);
        }
        @Override
        public List setFields(java.lang.String fields) {
          return (List) super.setFields(fields);
        }
        @Override
        public List setKey(java.lang.String key) {
          return (List) super.setKey(key);
        }
        @Override
        public List setOauthToken(java.lang.String oauthToken) {
          return (List) super.setOauthToken(oauthToken);
        }
        @Override
        public List setPrettyPrint(java.lang.Boolean prettyPrint) {
          return (List) super.setPrettyPrint(prettyPrint);
        }
        @Override
        public List setQuotaUser(java.lang.String quotaUser) {
          return (List) super.setQuotaUser(quotaUser);
        }
        @Override
        public List setUploadType(java.lang.String uploadType) {
          return (List) super.setUploadType(uploadType);
        }
        @Override
        public List setUploadProtocol(java.lang.String uploadProtocol) {
          return (List) super.setUploadProtocol(uploadProtocol);
        }
        /**
         * Required. Name of the bidder whose bidding functions will be listed. Format:
         * `bidders/{bidder_account_id}`
         */
        @com.google.api.client.util.Key
        private java.lang.String parent;
        /** Required. Name of the bidder whose bidding functions will be listed. Format:
       `bidders/{bidder_account_id}`
         */
        public java.lang.String getParent() {
          return parent;
        }
        /**
         * Required. Name of the bidder whose bidding functions will be listed. Format:
         * `bidders/{bidder_account_id}`
         */
        public List setParent(java.lang.String parent) {
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(),
                "Parameter parent must conform to the pattern " +
                "^bidders/[^/]+$");
          }
          this.parent = parent;
          return this;
        }
        /** The maximum number of bidding functions to return. */
        @com.google.api.client.util.Key
        private java.lang.Integer pageSize;
        /** The maximum number of bidding functions to return.
         */
        public java.lang.Integer getPageSize() {
          return pageSize;
        }
        /** The maximum number of bidding functions to return. */
        public List setPageSize(java.lang.Integer pageSize) {
          this.pageSize = pageSize;
          return this;
        }
        /**
         * A token identifying a page of results the server should return. This value is received
         * from a previous `ListBiddingFunctions` call in
         * ListBiddingFunctionsResponse.nextPageToken.
         */
        @com.google.api.client.util.Key
        private java.lang.String pageToken;
        /** A token identifying a page of results the server should return. This value is received from a
       previous `ListBiddingFunctions` call in ListBiddingFunctionsResponse.nextPageToken.
         */
        public java.lang.String getPageToken() {
          return pageToken;
        }
        /**
         * A token identifying a page of results the server should return. This value is received
         * from a previous `ListBiddingFunctions` call in
         * ListBiddingFunctionsResponse.nextPageToken.
         */
        public List setPageToken(java.lang.String pageToken) {
          this.pageToken = pageToken;
          return this;
        }
        @Override
        public List set(String parameterName, Object value) {
          return (List) super.set(parameterName, value);
        }
      }
}
}
  /**
   * Builder for {@link RealTimeBidding}.
   *
   * <p>
   * Implementation is not thread-safe.
   * </p>
   *
   * @since 1.3.0
   */
  public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {
    /**
     * Chooses the service root URL for a new client.
     *
     * @param transport the HTTP transport the client will use; may be {@code null}
     * @return the mTLS root URL when mTLS is requested/available, else the regular root URL
     */
    private static String chooseEndpoint(com.google.api.client.http.HttpTransport transport) {
      // If the GOOGLE_API_USE_MTLS_ENDPOINT environment variable value is "always", use mTLS endpoint.
      // If the env variable is "auto", use mTLS endpoint if and only if the transport is mTLS.
      // Use the regular endpoint for all other cases.
      String useMtlsEndpoint = System.getenv("GOOGLE_API_USE_MTLS_ENDPOINT");
      useMtlsEndpoint = useMtlsEndpoint == null ? "auto" : useMtlsEndpoint;
      if ("always".equals(useMtlsEndpoint) || ("auto".equals(useMtlsEndpoint) && transport != null && transport.isMtls())) {
        return DEFAULT_MTLS_ROOT_URL;
      }
      return DEFAULT_ROOT_URL;
    }
    /**
     * Returns an instance of a new builder.
     *
     * @param transport HTTP transport, which should normally be:
     *        <ul>
     *        <li>Google App Engine:
     *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
     *        <li>Android: {@code newCompatibleTransport} from
     *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
     *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
     *        </li>
     *        </ul>
     * @param jsonFactory JSON factory, which may be:
     *        <ul>
     *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
     *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
     *        <li>Android Honeycomb or higher:
     *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
     *        </ul>
     * @param httpRequestInitializer HTTP request initializer or {@code null} for none
     * @since 1.7
     */
    public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
        com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      super(
          transport,
          jsonFactory,
          Builder.chooseEndpoint(transport),
          DEFAULT_SERVICE_PATH,
          httpRequestInitializer,
          false);
      setBatchPath(DEFAULT_BATCH_PATH);
    }
    /** Builds a new instance of {@link RealTimeBidding}. */
    @Override
    public RealTimeBidding build() {
      return new RealTimeBidding(this);
    }
    // Covariant fluent-setter overrides so chained configuration keeps the Builder type.
    @Override
    public Builder setRootUrl(String rootUrl) {
      return (Builder) super.setRootUrl(rootUrl);
    }
    @Override
    public Builder setServicePath(String servicePath) {
      return (Builder) super.setServicePath(servicePath);
    }
    @Override
    public Builder setBatchPath(String batchPath) {
      return (Builder) super.setBatchPath(batchPath);
    }
    @Override
    public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
    }
    @Override
    public Builder setApplicationName(String applicationName) {
      return (Builder) super.setApplicationName(applicationName);
    }
    @Override
    public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
      return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
    }
    @Override
    public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
      return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
    }
    @Override
    public Builder setSuppressAllChecks(boolean suppressAllChecks) {
      return (Builder) super.setSuppressAllChecks(suppressAllChecks);
    }
    /**
     * Set the {@link RealTimeBiddingRequestInitializer}.
     *
     * @since 1.12
     */
    public Builder setRealTimeBiddingRequestInitializer(
        RealTimeBiddingRequestInitializer realtimebiddingRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(realtimebiddingRequestInitializer);
    }
    @Override
    public Builder setGoogleClientRequestInitializer(
        com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
    }
  }
}
|
googleapis/google-cloud-java | 36,074 | java-discoveryengine/proto-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/ListSessionsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1/conversational_search_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1;
/**
*
*
* <pre>
* Response for ListSessions method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.ListSessionsResponse}
*/
public final class ListSessionsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.ListSessionsResponse)
ListSessionsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListSessionsResponse.newBuilder() to construct.
private ListSessionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListSessionsResponse() {
sessions_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListSessionsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1.ConversationalSearchServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListSessionsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1.ConversationalSearchServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListSessionsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1.ListSessionsResponse.class,
com.google.cloud.discoveryengine.v1.ListSessionsResponse.Builder.class);
}
public static final int SESSIONS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.discoveryengine.v1.Session> sessions_;
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.discoveryengine.v1.Session> getSessionsList() {
return sessions_;
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.discoveryengine.v1.SessionOrBuilder>
getSessionsOrBuilderList() {
return sessions_;
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
*/
@java.lang.Override
public int getSessionsCount() {
return sessions_.size();
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1.Session getSessions(int index) {
return sessions_.get(index);
}
/**
*
*
* <pre>
* All the Sessions for a given data store.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1.SessionOrBuilder getSessionsOrBuilder(int index) {
return sessions_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Pagination token, if not returned indicates the last page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Pagination token, if not returned indicates the last page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < sessions_.size(); i++) {
output.writeMessage(1, sessions_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < sessions_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, sessions_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.discoveryengine.v1.ListSessionsResponse)) {
return super.equals(obj);
}
com.google.cloud.discoveryengine.v1.ListSessionsResponse other =
(com.google.cloud.discoveryengine.v1.ListSessionsResponse) obj;
if (!getSessionsList().equals(other.getSessionsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getSessionsCount() > 0) {
hash = (37 * hash) + SESSIONS_FIELD_NUMBER;
hash = (53 * hash) + getSessionsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points. The byte-oriented overloads delegate straight to
// PARSER; the stream-oriented ones go through GeneratedMessageV3 helpers that
// translate protobuf parse failures into the appropriate exception types.
public static com.google.cloud.discoveryengine.v1.ListSessionsResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.discoveryengine.v1.ListSessionsResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1.ListSessionsResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.discoveryengine.v1.ListSessionsResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1.ListSessionsResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.discoveryengine.v1.ListSessionsResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1.ListSessionsResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.discoveryengine.v1.ListSessionsResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message payload,
// allowing multiple messages on one stream.
public static com.google.cloud.discoveryengine.v1.ListSessionsResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.discoveryengine.v1.ListSessionsResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1.ListSessionsResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.discoveryengine.v1.ListSessionsResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// Fresh builder with all fields at their defaults.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Builder pre-populated from an existing message.
public static Builder newBuilder(
    com.google.cloud.discoveryengine.v1.ListSessionsResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

// The default instance yields an empty builder; any other instance is merged
// into a new builder so the builder starts with this message's field values.
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Response for ListSessions method.
 * </pre>
 *
 * Protobuf type {@code google.cloud.discoveryengine.v1.ListSessionsResponse}
 */
// Mutable builder for ListSessionsResponse. Internal bookkeeping:
//   bitField0_ bit 0x00000001 — the `sessions_` list is owned/mutable by this
//     builder (vs. borrowed/immutable from a merged message);
//   bitField0_ bit 0x00000002 — `nextPageToken_` has been explicitly set.
// The repeated `sessions` field operates in one of two modes: a plain list
// (`sessions_`) or, once sub-builders are requested, a RepeatedFieldBuilderV3
// (`sessionsBuilder_`); exactly one of the two is active at a time.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.ListSessionsResponse)
    com.google.cloud.discoveryengine.v1.ListSessionsResponseOrBuilder {

  // Descriptor/reflection plumbing shared with the outer message class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.discoveryengine.v1.ConversationalSearchServiceProto
        .internal_static_google_cloud_discoveryengine_v1_ListSessionsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.discoveryengine.v1.ConversationalSearchServiceProto
        .internal_static_google_cloud_discoveryengine_v1_ListSessionsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.discoveryengine.v1.ListSessionsResponse.class,
            com.google.cloud.discoveryengine.v1.ListSessionsResponse.Builder.class);
  }

  // Construct using com.google.cloud.discoveryengine.v1.ListSessionsResponse.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  // Resets every field to its default and clears all bitField flags.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    if (sessionsBuilder_ == null) {
      sessions_ = java.util.Collections.emptyList();
    } else {
      // Builder mode: clear the field builder and drop the (unused) list.
      sessions_ = null;
      sessionsBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    nextPageToken_ = "";
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.discoveryengine.v1.ConversationalSearchServiceProto
        .internal_static_google_cloud_discoveryengine_v1_ListSessionsResponse_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.ListSessionsResponse getDefaultInstanceForType() {
    return com.google.cloud.discoveryengine.v1.ListSessionsResponse.getDefaultInstance();
  }

  // Like buildPartial() but throws if the result is not initialized.
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.ListSessionsResponse build() {
    com.google.cloud.discoveryengine.v1.ListSessionsResponse result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  // Assembles a message from the builder's current state without an
  // initialization check.
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.ListSessionsResponse buildPartial() {
    com.google.cloud.discoveryengine.v1.ListSessionsResponse result =
        new com.google.cloud.discoveryengine.v1.ListSessionsResponse(this);
    buildPartialRepeatedFields(result);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Hands the repeated `sessions` field to the result. In list mode the list
  // is frozen (wrapped unmodifiable) and ownership transfers to the message,
  // so the mutable-list bit is cleared.
  private void buildPartialRepeatedFields(
      com.google.cloud.discoveryengine.v1.ListSessionsResponse result) {
    if (sessionsBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)) {
        sessions_ = java.util.Collections.unmodifiableList(sessions_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.sessions_ = sessions_;
    } else {
      result.sessions_ = sessionsBuilder_.build();
    }
  }

  // Copies singular fields that were explicitly set (per bitField0_ flags).
  private void buildPartial0(com.google.cloud.discoveryengine.v1.ListSessionsResponse result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.nextPageToken_ = nextPageToken_;
    }
  }

  // Pass-through overrides delegating to GeneratedMessageV3.Builder.
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dynamic merge: dispatches to the typed overload when possible.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.discoveryengine.v1.ListSessionsResponse) {
      return mergeFrom((com.google.cloud.discoveryengine.v1.ListSessionsResponse) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Typed merge. Repeated `sessions` elements are appended; the scalar
  // `nextPageToken` is overwritten only when set (non-empty) in `other`.
  public Builder mergeFrom(com.google.cloud.discoveryengine.v1.ListSessionsResponse other) {
    if (other == com.google.cloud.discoveryengine.v1.ListSessionsResponse.getDefaultInstance())
      return this;
    if (sessionsBuilder_ == null) {
      // List mode: if this builder's list is still empty, borrow other's
      // (immutable) list directly instead of copying.
      if (!other.sessions_.isEmpty()) {
        if (sessions_.isEmpty()) {
          sessions_ = other.sessions_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureSessionsIsMutable();
          sessions_.addAll(other.sessions_);
        }
        onChanged();
      }
    } else {
      // Builder mode: if the field builder is empty, collapse back to list
      // mode and borrow other's list (re-creating the builder only when the
      // runtime forces field builders).
      if (!other.sessions_.isEmpty()) {
        if (sessionsBuilder_.isEmpty()) {
          sessionsBuilder_.dispose();
          sessionsBuilder_ = null;
          sessions_ = other.sessions_;
          bitField0_ = (bitField0_ & ~0x00000001);
          sessionsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getSessionsFieldBuilder()
                  : null;
        } else {
          sessionsBuilder_.addAllMessages(other.sessions_);
        }
      }
    }
    if (!other.getNextPageToken().isEmpty()) {
      nextPageToken_ = other.nextPageToken_;
      bitField0_ |= 0x00000002;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  // No required fields in this message, so a builder is always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format merge. Tag 10 = field 1 (`sessions`, length-delimited
  // message); tag 18 = field 2 (`next_page_token`, UTF-8 string); tag 0 =
  // end of stream; anything else goes to the unknown-field set.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              com.google.cloud.discoveryengine.v1.Session m =
                  input.readMessage(
                      com.google.cloud.discoveryengine.v1.Session.parser(), extensionRegistry);
              if (sessionsBuilder_ == null) {
                ensureSessionsIsMutable();
                sessions_.add(m);
              } else {
                sessionsBuilder_.addMessage(m);
              }
              break;
            } // case 10
          case 18:
            {
              nextPageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      // Notify parent builders even when parsing aborts part-way.
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  private java.util.List<com.google.cloud.discoveryengine.v1.Session> sessions_ =
      java.util.Collections.emptyList();

  // Copy-on-write: clones the list the first time this builder mutates it,
  // then marks it owned via bit 0x00000001.
  private void ensureSessionsIsMutable() {
    if (!((bitField0_ & 0x00000001) != 0)) {
      sessions_ = new java.util.ArrayList<com.google.cloud.discoveryengine.v1.Session>(sessions_);
      bitField0_ |= 0x00000001;
    }
  }

  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.discoveryengine.v1.Session,
          com.google.cloud.discoveryengine.v1.Session.Builder,
          com.google.cloud.discoveryengine.v1.SessionOrBuilder>
      sessionsBuilder_;

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public java.util.List<com.google.cloud.discoveryengine.v1.Session> getSessionsList() {
    if (sessionsBuilder_ == null) {
      return java.util.Collections.unmodifiableList(sessions_);
    } else {
      return sessionsBuilder_.getMessageList();
    }
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public int getSessionsCount() {
    if (sessionsBuilder_ == null) {
      return sessions_.size();
    } else {
      return sessionsBuilder_.getCount();
    }
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public com.google.cloud.discoveryengine.v1.Session getSessions(int index) {
    if (sessionsBuilder_ == null) {
      return sessions_.get(index);
    } else {
      return sessionsBuilder_.getMessage(index);
    }
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public Builder setSessions(int index, com.google.cloud.discoveryengine.v1.Session value) {
    if (sessionsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureSessionsIsMutable();
      sessions_.set(index, value);
      onChanged();
    } else {
      sessionsBuilder_.setMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public Builder setSessions(
      int index, com.google.cloud.discoveryengine.v1.Session.Builder builderForValue) {
    if (sessionsBuilder_ == null) {
      ensureSessionsIsMutable();
      sessions_.set(index, builderForValue.build());
      onChanged();
    } else {
      sessionsBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public Builder addSessions(com.google.cloud.discoveryengine.v1.Session value) {
    if (sessionsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureSessionsIsMutable();
      sessions_.add(value);
      onChanged();
    } else {
      sessionsBuilder_.addMessage(value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public Builder addSessions(int index, com.google.cloud.discoveryengine.v1.Session value) {
    if (sessionsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureSessionsIsMutable();
      sessions_.add(index, value);
      onChanged();
    } else {
      sessionsBuilder_.addMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public Builder addSessions(
      com.google.cloud.discoveryengine.v1.Session.Builder builderForValue) {
    if (sessionsBuilder_ == null) {
      ensureSessionsIsMutable();
      sessions_.add(builderForValue.build());
      onChanged();
    } else {
      sessionsBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public Builder addSessions(
      int index, com.google.cloud.discoveryengine.v1.Session.Builder builderForValue) {
    if (sessionsBuilder_ == null) {
      ensureSessionsIsMutable();
      sessions_.add(index, builderForValue.build());
      onChanged();
    } else {
      sessionsBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public Builder addAllSessions(
      java.lang.Iterable<? extends com.google.cloud.discoveryengine.v1.Session> values) {
    if (sessionsBuilder_ == null) {
      ensureSessionsIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, sessions_);
      onChanged();
    } else {
      sessionsBuilder_.addAllMessages(values);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public Builder clearSessions() {
    if (sessionsBuilder_ == null) {
      sessions_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
    } else {
      sessionsBuilder_.clear();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public Builder removeSessions(int index) {
    if (sessionsBuilder_ == null) {
      ensureSessionsIsMutable();
      sessions_.remove(index);
      onChanged();
    } else {
      sessionsBuilder_.remove(index);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public com.google.cloud.discoveryengine.v1.Session.Builder getSessionsBuilder(int index) {
    return getSessionsFieldBuilder().getBuilder(index);
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public com.google.cloud.discoveryengine.v1.SessionOrBuilder getSessionsOrBuilder(int index) {
    if (sessionsBuilder_ == null) {
      return sessions_.get(index);
    } else {
      return sessionsBuilder_.getMessageOrBuilder(index);
    }
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public java.util.List<? extends com.google.cloud.discoveryengine.v1.SessionOrBuilder>
      getSessionsOrBuilderList() {
    if (sessionsBuilder_ != null) {
      return sessionsBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(sessions_);
    }
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public com.google.cloud.discoveryengine.v1.Session.Builder addSessionsBuilder() {
    return getSessionsFieldBuilder()
        .addBuilder(com.google.cloud.discoveryengine.v1.Session.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public com.google.cloud.discoveryengine.v1.Session.Builder addSessionsBuilder(int index) {
    return getSessionsFieldBuilder()
        .addBuilder(index, com.google.cloud.discoveryengine.v1.Session.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * All the Sessions for a given data store.
   * </pre>
   *
   * <code>repeated .google.cloud.discoveryengine.v1.Session sessions = 1;</code>
   */
  public java.util.List<com.google.cloud.discoveryengine.v1.Session.Builder>
      getSessionsBuilderList() {
    return getSessionsFieldBuilder().getBuilderList();
  }

  // Lazily switches the repeated field into builder mode: the current list is
  // handed to a RepeatedFieldBuilderV3 and `sessions_` is nulled out.
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.discoveryengine.v1.Session,
          com.google.cloud.discoveryengine.v1.Session.Builder,
          com.google.cloud.discoveryengine.v1.SessionOrBuilder>
      getSessionsFieldBuilder() {
    if (sessionsBuilder_ == null) {
      sessionsBuilder_ =
          new com.google.protobuf.RepeatedFieldBuilderV3<
              com.google.cloud.discoveryengine.v1.Session,
              com.google.cloud.discoveryengine.v1.Session.Builder,
              com.google.cloud.discoveryengine.v1.SessionOrBuilder>(
              sessions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
      sessions_ = null;
    }
    return sessionsBuilder_;
  }

  // Stored as Object: either a String or a ByteString, converted lazily.
  private java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * Pagination token, if not returned indicates the last page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String for subsequent calls.
      nextPageToken_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Pagination token, if not returned indicates the last page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString for subsequent calls.
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Pagination token, if not returned indicates the last page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @param value The nextPageToken to set.
   * @return This builder for chaining.
   */
  public Builder setNextPageToken(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    nextPageToken_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Pagination token, if not returned indicates the last page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearNextPageToken() {
    nextPageToken_ = getDefaultInstance().getNextPageToken();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Pagination token, if not returned indicates the last page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @param value The bytes for nextPageToken to set.
   * @return This builder for chaining.
   */
  public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    nextPageToken_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.ListSessionsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.ListSessionsResponse)
// Singleton default (all-fields-default) instance, created eagerly at class load.
private static final com.google.cloud.discoveryengine.v1.ListSessionsResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.ListSessionsResponse();
}

public static com.google.cloud.discoveryengine.v1.ListSessionsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser used by all parseFrom() entry points. Delegates to the builder's
// wire-format mergeFrom and attaches the partially-built message to any
// parse exception so callers can inspect what was read before the failure.
private static final com.google.protobuf.Parser<ListSessionsResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListSessionsResponse>() {
      @java.lang.Override
      public ListSessionsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListSessionsResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListSessionsResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.discoveryengine.v1.ListSessionsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/metastore/v1alpha/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.metastore.v1alpha;
/**
*
*
* <pre>
* Request message for
* [DataprocMetastore.AlterMetadataResourceLocation][google.cloud.metastore.v1alpha.DataprocMetastore.AlterMetadataResourceLocation].
* </pre>
*
* Protobuf type {@code google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest}
*/
public final class AlterMetadataResourceLocationRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest)
AlterMetadataResourceLocationRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Use AlterMetadataResourceLocationRequest.newBuilder() to construct.
private AlterMetadataResourceLocationRequest(
    com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default-instance constructor: all string fields start at proto3 default "".
private AlterMetadataResourceLocationRequest() {
  service_ = "";
  resourceName_ = "";
  locationUri_ = "";
}

// Runtime hook for creating instances reflectively without a builder.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new AlterMetadataResourceLocationRequest();
}
// Descriptor/reflection plumbing generated from metastore.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.metastore.v1alpha.MetastoreProto
      .internal_static_google_cloud_metastore_v1alpha_AlterMetadataResourceLocationRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.metastore.v1alpha.MetastoreProto
      .internal_static_google_cloud_metastore_v1alpha_AlterMetadataResourceLocationRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest.class,
          com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest.Builder.class);
}
public static final int SERVICE_FIELD_NUMBER = 1;

// Stored as Object: either a String or a ByteString, converted lazily and
// cached on first access (standard generated-string representation).
@SuppressWarnings("serial")
private volatile java.lang.Object service_ = "";

/**
 *
 *
 * <pre>
 * Required. The relative resource name of the metastore service to mutate
 * metadata, in the following format:
 *
 * `projects/{project_id}/locations/{location_id}/services/{service_id}`.
 * </pre>
 *
 * <code>
 * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The service.
 */
@java.lang.Override
public java.lang.String getService() {
  java.lang.Object ref = service_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String for subsequent calls.
    service_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The relative resource name of the metastore service to mutate
 * metadata, in the following format:
 *
 * `projects/{project_id}/locations/{location_id}/services/{service_id}`.
 * </pre>
 *
 * <code>
 * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for service.
 */
@java.lang.Override
public com.google.protobuf.ByteString getServiceBytes() {
  java.lang.Object ref = service_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent calls.
    service_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int RESOURCE_NAME_FIELD_NUMBER = 2;

// Stored as Object: either a String or a ByteString, converted lazily and
// cached on first access (standard generated-string representation).
@SuppressWarnings("serial")
private volatile java.lang.Object resourceName_ = "";

/**
 *
 *
 * <pre>
 * Required. The relative metadata resource name in the following format.
 *
 * `databases/{database_id}`
 * or
 * `databases/{database_id}/tables/{table_id}`
 * or
 * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
 * </pre>
 *
 * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The resourceName.
 */
@java.lang.Override
public java.lang.String getResourceName() {
  java.lang.Object ref = resourceName_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String for subsequent calls.
    resourceName_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The relative metadata resource name in the following format.
 *
 * `databases/{database_id}`
 * or
 * `databases/{database_id}/tables/{table_id}`
 * or
 * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
 * </pre>
 *
 * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for resourceName.
 */
@java.lang.Override
public com.google.protobuf.ByteString getResourceNameBytes() {
  java.lang.Object ref = resourceName_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent calls.
    resourceName_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int LOCATION_URI_FIELD_NUMBER = 3;

// Stored as Object: either a String or a ByteString, converted lazily and
// cached on first access (standard generated-string representation).
@SuppressWarnings("serial")
private volatile java.lang.Object locationUri_ = "";

/**
 *
 *
 * <pre>
 * Required. The new location URI for the metadata resource.
 * </pre>
 *
 * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The locationUri.
 */
@java.lang.Override
public java.lang.String getLocationUri() {
  java.lang.Object ref = locationUri_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String for subsequent calls.
    locationUri_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The new location URI for the metadata resource.
 * </pre>
 *
 * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for locationUri.
 */
@java.lang.Override
public com.google.protobuf.ByteString getLocationUriBytes() {
  java.lang.Object ref = locationUri_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent calls.
    locationUri_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;

// No required (proto2-style) fields here, so initialization always succeeds.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes the three string fields in field-number order, skipping any
// that hold the proto3 default (""), then the preserved unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(service_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, service_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, resourceName_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(locationUri_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, locationUri_);
  }
  getUnknownFields().writeTo(output);
}
// Computes the serialized byte size, memoized with -1 as the "not yet
// computed" sentinel. Field accounting mirrors writeTo() exactly.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(service_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, service_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, resourceName_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(locationUri_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, locationUri_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest)) {
return super.equals(obj);
}
com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest other =
(com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest) obj;
if (!getService().equals(other.getService())) return false;
if (!getResourceName().equals(other.getResourceName())) return false;
if (!getLocationUri().equals(other.getLocationUri())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
  // 0 doubles as the "not yet computed" sentinel for the memoized hash.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Each field contributes its field number and value to the hash.
  hash = (37 * hash) + SERVICE_FIELD_NUMBER;
  hash = (53 * hash) + getService().hashCode();
  hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getResourceName().hashCode();
  hash = (37 * hash) + LOCATION_URI_FIELD_NUMBER;
  hash = (53 * hash) + getLocationUri().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard protoc-generated parse entry points. All overloads delegate to
// the shared PARSER singleton (or the GeneratedMessageV3 IO helpers for
// stream-based input).
public static com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message body.
public static com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods generated by protoc.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields a fresh empty builder; any other instance
  // seeds the builder with its current field values.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Request message for
 * [DataprocMetastore.AlterMetadataResourceLocation][google.cloud.metastore.v1alpha.DataprocMetastore.AlterMetadataResourceLocation].
 * </pre>
 *
 * Protobuf type {@code google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest}
 */
// NOTE: protoc-generated builder; to change behavior, edit the .proto source
// and regenerate rather than modifying this class by hand.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest)
    com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.metastore.v1alpha.MetastoreProto
        .internal_static_google_cloud_metastore_v1alpha_AlterMetadataResourceLocationRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.metastore.v1alpha.MetastoreProto
        .internal_static_google_cloud_metastore_v1alpha_AlterMetadataResourceLocationRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest.class,
            com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest.Builder
                .class);
  }

  // Construct using
  // com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  @java.lang.Override
  public Builder clear() {
    // Resets all fields to their proto3 defaults and clears the has-bits.
    super.clear();
    bitField0_ = 0;
    service_ = "";
    resourceName_ = "";
    locationUri_ = "";
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.metastore.v1alpha.MetastoreProto
        .internal_static_google_cloud_metastore_v1alpha_AlterMetadataResourceLocationRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest
      getDefaultInstanceForType() {
    return com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest
        .getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest build() {
    com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest result =
        buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest buildPartial() {
    com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest result =
        new com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest(this);
    // Only copy fields whose has-bit is set on this builder.
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  private void buildPartial0(
      com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.service_ = service_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.resourceName_ = resourceName_;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.locationUri_ = locationUri_;
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    // Dispatch to the typed merge when possible, otherwise use reflection.
    if (other
        instanceof com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest) {
      return mergeFrom(
          (com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(
      com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest other) {
    if (other
        == com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest
            .getDefaultInstance()) return this;
    // proto3 merge semantics: a field is copied only if it is non-empty on
    // the source message.
    if (!other.getService().isEmpty()) {
      service_ = other.service_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (!other.getResourceName().isEmpty()) {
      resourceName_ = other.resourceName_;
      bitField0_ |= 0x00000002;
      onChanged();
    }
    if (!other.getLocationUri().isEmpty()) {
      locationUri_ = other.locationUri_;
      bitField0_ |= 0x00000004;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              service_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              resourceName_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          case 26:
            {
              locationUri_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 26
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // Bit i of bitField0_ records that field number i+1 has been explicitly
  // set on this builder (see buildPartial0).
  private int bitField0_;

  private java.lang.Object service_ = "";
  /**
   *
   *
   * <pre>
   * Required. The relative resource name of the metastore service to mutate
   * metadata, in the following format:
   *
   * `projects/{project_id}/locations/{location_id}/services/{service_id}`.
   * </pre>
   *
   * <code>
   * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The service.
   */
  public java.lang.String getService() {
    java.lang.Object ref = service_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      service_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The relative resource name of the metastore service to mutate
   * metadata, in the following format:
   *
   * `projects/{project_id}/locations/{location_id}/services/{service_id}`.
   * </pre>
   *
   * <code>
   * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for service.
   */
  public com.google.protobuf.ByteString getServiceBytes() {
    java.lang.Object ref = service_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      service_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The relative resource name of the metastore service to mutate
   * metadata, in the following format:
   *
   * `projects/{project_id}/locations/{location_id}/services/{service_id}`.
   * </pre>
   *
   * <code>
   * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The service to set.
   * @return This builder for chaining.
   */
  public Builder setService(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    service_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The relative resource name of the metastore service to mutate
   * metadata, in the following format:
   *
   * `projects/{project_id}/locations/{location_id}/services/{service_id}`.
   * </pre>
   *
   * <code>
   * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearService() {
    service_ = getDefaultInstance().getService();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The relative resource name of the metastore service to mutate
   * metadata, in the following format:
   *
   * `projects/{project_id}/locations/{location_id}/services/{service_id}`.
   * </pre>
   *
   * <code>
   * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The bytes for service to set.
   * @return This builder for chaining.
   */
  public Builder setServiceBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    service_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  private java.lang.Object resourceName_ = "";
  /**
   *
   *
   * <pre>
   * Required. The relative metadata resource name in the following format.
   *
   * `databases/{database_id}`
   * or
   * `databases/{database_id}/tables/{table_id}`
   * or
   * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
   * </pre>
   *
   * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The resourceName.
   */
  public java.lang.String getResourceName() {
    java.lang.Object ref = resourceName_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      resourceName_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The relative metadata resource name in the following format.
   *
   * `databases/{database_id}`
   * or
   * `databases/{database_id}/tables/{table_id}`
   * or
   * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
   * </pre>
   *
   * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for resourceName.
   */
  public com.google.protobuf.ByteString getResourceNameBytes() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      resourceName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The relative metadata resource name in the following format.
   *
   * `databases/{database_id}`
   * or
   * `databases/{database_id}/tables/{table_id}`
   * or
   * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
   * </pre>
   *
   * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The resourceName to set.
   * @return This builder for chaining.
   */
  public Builder setResourceName(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    resourceName_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The relative metadata resource name in the following format.
   *
   * `databases/{database_id}`
   * or
   * `databases/{database_id}/tables/{table_id}`
   * or
   * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
   * </pre>
   *
   * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearResourceName() {
    resourceName_ = getDefaultInstance().getResourceName();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The relative metadata resource name in the following format.
   *
   * `databases/{database_id}`
   * or
   * `databases/{database_id}/tables/{table_id}`
   * or
   * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
   * </pre>
   *
   * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The bytes for resourceName to set.
   * @return This builder for chaining.
   */
  public Builder setResourceNameBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    resourceName_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  private java.lang.Object locationUri_ = "";
  /**
   *
   *
   * <pre>
   * Required. The new location URI for the metadata resource.
   * </pre>
   *
   * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The locationUri.
   */
  public java.lang.String getLocationUri() {
    java.lang.Object ref = locationUri_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      locationUri_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The new location URI for the metadata resource.
   * </pre>
   *
   * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for locationUri.
   */
  public com.google.protobuf.ByteString getLocationUriBytes() {
    java.lang.Object ref = locationUri_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      locationUri_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The new location URI for the metadata resource.
   * </pre>
   *
   * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The locationUri to set.
   * @return This builder for chaining.
   */
  public Builder setLocationUri(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    locationUri_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The new location URI for the metadata resource.
   * </pre>
   *
   * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearLocationUri() {
    locationUri_ = getDefaultInstance().getLocationUri();
    bitField0_ = (bitField0_ & ~0x00000004);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The new location URI for the metadata resource.
   * </pre>
   *
   * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The bytes for locationUri to set.
   * @return This builder for chaining.
   */
  public Builder setLocationUriBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    locationUri_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest)
// Shared immutable default instance (all fields at proto3 defaults).
private static final com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE =
      new com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest();
}

public static com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser singleton; on failure each exception is rethrown with the
// partially-parsed message attached via setUnfinishedMessage().
private static final com.google.protobuf.Parser<AlterMetadataResourceLocationRequest> PARSER =
    new com.google.protobuf.AbstractParser<AlterMetadataResourceLocationRequest>() {
      @java.lang.Override
      public AlterMetadataResourceLocationRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<AlterMetadataResourceLocationRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<AlterMetadataResourceLocationRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.metastore.v1alpha.AlterMetadataResourceLocationRequest
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
oracle/coherence | 36,090 | prj/test/functional/queues/src/main/java/queues/AbstractQueueTests.java | /*
* Copyright (c) 2000, 2025, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* https://oss.oracle.com/licenses/upl.
*/
package queues;
import com.oracle.coherence.common.base.Exceptions;
import com.oracle.coherence.common.base.Randoms;
import com.tangosol.coherence.config.scheme.PagedQueueScheme;
import com.tangosol.coherence.config.scheme.SimpleDequeScheme;
import com.tangosol.internal.net.queue.NamedMapDeque;
import com.tangosol.internal.net.queue.PagedQueue;
import com.tangosol.internal.net.queue.QueuePageIterator;
import com.tangosol.internal.net.queue.extractor.QueueKeyExtractor;
import com.tangosol.internal.net.queue.model.QueueKey;
import com.tangosol.net.CacheService;
import com.tangosol.net.ConfigurableCacheFactory;
import com.tangosol.net.NamedDeque;
import com.tangosol.net.NamedMap;
import com.tangosol.net.NamedQueue;
import com.tangosol.net.Session;
import com.tangosol.net.cache.ConfigurableCacheMap;
import com.tangosol.util.ConverterCollections;
import com.tangosol.util.InvocableMap;
import com.tangosol.util.MapIndex;
import com.tangosol.util.ObservableMap;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.TreeSet;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.BiFunction;
import java.util.function.Function;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.number.OrderingComparison.greaterThan;
import static org.hamcrest.number.OrderingComparison.greaterThanOrEqualTo;
import static org.hamcrest.number.OrderingComparison.lessThanOrEqualTo;
import static org.junit.jupiter.api.Assertions.assertThrows;
@SuppressWarnings({"rawtypes", "unchecked", "MismatchedQueryAndUpdateOfCollection"})
public abstract class AbstractQueueTests<QueueType extends NamedQueue>
extends AbstractCollectionTests<QueueType, QueueType>
implements QueueTests<QueueType>
{
@Override
@SuppressWarnings("unchecked")
public QueueType getNamedCollection(Session session, String sName)
    {
    // Realize the named collection from the simple deque scheme and cast it
    // to the queue type under test.
    NamedQueue<?> queue = SimpleDequeScheme.INSTANCE.realize(sName, session);
    return (QueueType) queue;
    }
@Override
@SuppressWarnings("unchecked")
public QueueType getCollection(Session session, String sName)
    {
    // Same realization path as getNamedCollection(): a simple deque scheme
    // backed collection cast to the queue type under test.
    NamedQueue<?> queue = SimpleDequeScheme.INSTANCE.realize(sName, session);
    return (QueueType) queue;
    }
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldFailToEnsureIncompatibleQueue(String sSerializer) throws Exception
    {
    QueueType queue   = getNewCollection(sSerializer);
    String    sName   = queue.getName();
    Session   session = getSession();

    // unwrap any converter wrapper so the real implementation can be inspected
    NamedQueue<?> inner = queue;
    if (inner instanceof ConverterCollections.ConverterCollection<?, ?>)
        {
        inner = (NamedQueue<?>) ((ConverterCollections.ConverterCollection) queue).getCollection();
        }

    if (inner instanceof PagedQueue<?>)
        {
        // a paged queue already exists, so ensuring a simple queue of the
        // same name must fail
        assertThrows(IllegalStateException.class, () -> SimpleDequeScheme.INSTANCE.realize(sName, session));
        }
    else
        {
        // a simple queue or deque already exists, so ensuring a paged queue
        // of the same name must fail (the original NamedDeque and NamedQueue
        // branches were identical, so they are folded into one)
        assertThrows(IllegalStateException.class, () -> PagedQueueScheme.INSTANCE.realize(sName, session));
        }
    }
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldEnsureCompatibleQueue(String sSerializer)
    {
    QueueType queue   = getNewCollection(sSerializer);
    String    sName   = queue.getName();
    Session   session = getSession();

    // unwrap any converter wrapper so the real implementation can be inspected
    NamedQueue<?> inner = queue;
    if (inner instanceof ConverterCollections.ConverterCollection<?, ?>)
        {
        inner = (NamedQueue<?>) ((ConverterCollections.ConverterCollection) queue).getCollection();
        }

    NamedQueue<?> queueSecond;
    if (inner instanceof PagedQueue<?>)
        {
        // re-ensuring a paged queue of the same name must succeed
        queueSecond = PagedQueueScheme.INSTANCE.realize(sName, session);
        assertThat(queueSecond.getName(), is(sName));
        assertThat(queueSecond, is(instanceOf(PagedQueue.class)));
        }
    else if (inner instanceof NamedDeque<?>)
        {
        // re-ensuring a simple deque of the same name must succeed
        queueSecond = SimpleDequeScheme.INSTANCE.realize(sName, session);
        assertThat(queueSecond.getName(), is(sName));
        assertThat(queueSecond, is(instanceOf(NamedDeque.class)));
        }
    else
        {
        // plain named queue: re-ensuring via the simple deque scheme succeeds
        queueSecond = SimpleDequeScheme.INSTANCE.realize(sName, session);
        assertThat(queueSecond.getName(), is(sName));
        assertThat(queueSecond, is(instanceOf(NamedQueue.class)));
        }
    }
// ----- test add() method ----------------------------------------------
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldAddToQueue(String sSerializer)
    {
    QueueType queue  = getNewCollection(sSerializer);
    String    sValue = "message-1";

    assertThat(queue.add(sValue), is(true));

    // the single added element must be visible in the backing cache
    NamedMap<?, ?> cache = getCollectionCache(queue.getName());
    assertThat(cache.size(), is(1));
    Object oKey = cache.keySet().iterator().next();
    assertThat(cache.get(oKey), is(sValue));
    }
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldAddToQueueInOrder(String sSerializer)
    {
    QueueType queue    = getNewCollection(sSerializer);
    String    sPrefix  = "message-";
    int       cMessage = 100;

    for (int i = 0; i < cMessage; i++)
        {
        assertThat(queue.add(sPrefix + i), is(true));
        }

    NamedMap<?, ?> cache = getCollectionCache(queue.getName());
    assertThat(cache.size(), is(cMessage));

    // the cache keys are expected to sort into the order the elements were
    // added, so the values must read back in add order
    TreeSet<?> setKey = new TreeSet<>(cache.keySet());
    assertThat(setKey.size(), is(cMessage));

    Iterator<?> iter = setKey.iterator();
    for (int i = 0; iter.hasNext(); i++)
        {
        assertThat(cache.get(iter.next()), is(sPrefix + i));
        }
    }
// ----- test append() method -------------------------------------------
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldAppendToQueue(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);

    // append() returns the id assigned to each element; a valid id is
    // strictly greater than Long.MIN_VALUE
    String[] asValue = {"message-1", "message-2", "message-3"};
    for (String sValue : asValue)
        {
        assertThat(queue.append(sValue), is(greaterThan(Long.MIN_VALUE)));
        }

    // the cache keys sort into append order
    NamedMap<?, ?> cache   = getCollectionCache(queue.getName());
    List<?>        listKey = cache.keySet().stream().sorted().toList();
    assertThat(listKey.size(), is(asValue.length));
    for (int i = 0; i < asValue.length; i++)
        {
        assertThat(cache.get(listKey.get(i)), is(asValue[i]));
        }
    }
// ----- test offer() method --------------------------------------------
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldOfferToQueue(String sSerializer)
    {
    QueueType queue  = getNewCollection(sSerializer);
    String    sValue = "message-1";

    assertThat(queue.offer(sValue), is(true));

    // the single offered element must be visible in the backing cache
    NamedMap<?, ?> cache = getCollectionCache(queue.getName());
    assertThat(cache.size(), is(1));
    Object oKey = cache.keySet().iterator().next();
    assertThat(cache.get(oKey), is(sValue));
    }
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldOfferToQueueWithExpiry(String sSerializer)
    {
    QueueType queue  = getNewCollection(sSerializer);
    String    sValue = "message-1";
    long      cTTL   = TimeUnit.MINUTES.toMillis(10);

    // take the start time a minute in the past so the expiry assertion
    // below has generous slack for slow test runs
    long ldtStart = System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(1);

    assertThat(queue.offer(sValue, cTTL), is(true));

    NamedMap<Object, Object> cache = getCollectionCache(queue.getName());
    assertThat(cache.size(), is(1));

    Object oKey = cache.keySet().iterator().next();
    assertThat(cache.get(oKey), is(sValue));

    // the entry's remaining expiry must be at least the requested TTL less
    // the elapsed time
    long cExpiry  = cache.invoke(oKey, AbstractQueueTests::getEntryExpiry);
    long cElapsed = System.currentTimeMillis() - ldtStart;
    assertThat(cExpiry, is(greaterThanOrEqualTo(cTTL - cElapsed)));
    }
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldAddToQueueWithExpiry(String sSerializer)
    {
    QueueType queue  = getNewCollection(sSerializer);
    String    sValue = "message-1";
    long      cTTL   = TimeUnit.MINUTES.toMillis(10);

    // take the start time a minute in the past so the expiry assertion
    // below has generous slack for slow test runs
    long ldtStart = System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(1);

    assertThat(queue.add(sValue, cTTL), is(true));

    NamedMap<Object, Object> cache = getCollectionCache(queue.getName());
    assertThat(cache.size(), is(1));

    Object oKey = cache.keySet().iterator().next();
    assertThat(cache.get(oKey), is(sValue));

    // the entry's remaining expiry must be at least the requested TTL less
    // the elapsed time
    long cExpiry  = cache.invoke(oKey, AbstractQueueTests::getEntryExpiry);
    long cElapsed = System.currentTimeMillis() - ldtStart;
    assertThat(cExpiry, is(greaterThanOrEqualTo(cTTL - cElapsed)));
    }
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldOfferToQueueInOrder(String sSerializer)
    {
    QueueType queue    = getNewCollection(sSerializer);
    String    sPrefix  = "message-";
    int       cMessage = 100;

    for (int i = 0; i < cMessage; i++)
        {
        assertThat(queue.offer(sPrefix + i), is(true));
        }

    NamedMap<?, ?> cache = getCollectionCache(queue.getName());
    assertThat(cache.size(), is(cMessage));

    // the cache keys are expected to sort into the order the elements were
    // offered, so the values must read back in offer order
    TreeSet<?> setKey = new TreeSet<>(cache.keySet());
    assertThat(setKey.size(), is(cMessage));

    Iterator<?> iter = setKey.iterator();
    for (int i = 0; iter.hasNext(); i++)
        {
        assertThat(cache.get(iter.next()), is(sPrefix + i));
        }
    }
// ----- test remove() method ------------------------------------------
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldRemoveFromEmptyQueue(String sSerializer)
    {
    // remove() on an empty queue must throw rather than return null
    QueueType queue = getNewCollection(sSerializer);
    assertThrows(NoSuchElementException.class, () -> queue.remove());
    }
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldRemoveFromQueue(String sSerializer)
    {
    QueueType queue  = getNewCollection(sSerializer);
    NamedMap  cache  = getCollectionCache(queue.getName());
    String    sValue = "message-1";

    queue.offer(sValue);

    // remove() returns the head element...
    assertThat(queue.remove(), is(sValue));

    // ...leaving both the queue and its backing cache empty
    assertThat(queue.isEmpty(), is(true));
    assertThat(cache.isEmpty(), is(true));

    // a further remove() on the now-empty queue throws
    assertThrows(NoSuchElementException.class, () -> queue.remove());
    }
/**
 * {@code remove()} must return elements in FIFO order and leave both the
 * queue and its backing cache empty once all elements have been removed.
 * <p>
 * Fixes: removed the unused local {@code nHash} and the raw
 * {@code NamedMap} type from the original.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldRemoveFromQueueInOrder(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    NamedMap<?, ?> cache = getCollectionCache(queue.getName());
    String sPrefix = "message-";
    long cMessage = 100L;
    for (long i = 0; i < cMessage; i++)
        {
        queue.offer(sPrefix + i);
        }
    for (long i = 0; i < cMessage; i++)
        {
        assertThat(queue.remove(), is(sPrefix + i));
        }
    assertThat(queue.isEmpty(), is(true));
    assertThat(cache.isEmpty(), is(true));
    // once drained, a further remove() must throw
    assertThrows(NoSuchElementException.class, queue::remove);
    }
// ----- test poll() method ---------------------------------------------
/**
 * {@code poll()} on an empty queue returns {@code null} rather than throwing.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldPollFromEmptyQueue(String sSerializer)
    {
    QueueType queueEmpty = getNewCollection(sSerializer);
    assertThat(queueEmpty.poll(), is(nullValue()));
    }
/**
 * {@code poll()} should return the single queued element, leave the queue
 * and its backing cache empty, and then return {@code null}.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldPollFromQueue(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    // wildcard type instead of the raw NamedMap the original used
    NamedMap<?, ?> cache = getCollectionCache(queue.getName());
    String sValue = "message-1";
    queue.offer(sValue);
    assertThat(queue.poll(), is(sValue));
    assertThat(queue.isEmpty(), is(true));
    assertThat(cache.isEmpty(), is(true));
    // a second poll on the now-empty queue returns null
    assertThat(queue.poll(), is(nullValue()));
    }
/**
 * {@code poll()} must return elements in FIFO order and leave both the
 * queue and its backing cache empty once drained.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldPollFromQueueInOrder(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    // wildcard type instead of the raw NamedMap the original used
    NamedMap<?, ?> cache = getCollectionCache(queue.getName());
    String sPrefix = "message-";
    long cMessage = 100L;
    for (long i = 0; i < cMessage; i++)
        {
        queue.offer(sPrefix + i);
        }
    for (long i = 0; i < cMessage; i++)
        {
        assertThat(queue.poll(), is(sPrefix + i));
        }
    assertThat(queue.isEmpty(), is(true));
    assertThat(cache.isEmpty(), is(true));
    // once drained, poll() returns null
    assertThat(queue.poll(), is(nullValue()));
    }
// ----- test element() method ------------------------------------------
/**
 * {@code element()} on an empty queue must throw
 * {@link NoSuchElementException}.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldGetElementFromEmptyQueue(String sSerializer)
    {
    QueueType queueEmpty = getNewCollection(sSerializer);
    assertThrows(NoSuchElementException.class, queueEmpty::element);
    }
/**
 * {@code element()} returns the head of the queue without removing it, and
 * repeated calls keep returning the same head.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldGetElementFromQueue(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    // wildcard type instead of the raw NamedMap the original used
    NamedMap<?, ?> cache = getCollectionCache(queue.getName());
    String sValue = "message-1";
    queue.offer(sValue);
    assertThat(queue.element(), is(sValue));
    // element() must not remove anything
    assertThat(queue.isEmpty(), is(false));
    assertThat(cache.isEmpty(), is(false));
    assertThat(queue.element(), is(sValue));
    }
// ----- test peek() method ---------------------------------------------
/**
 * {@code peek()} on an empty queue returns {@code null} rather than throwing.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldPeekAtEmptyQueue(String sSerializer)
    {
    QueueType queueEmpty = getNewCollection(sSerializer);
    assertThat(queueEmpty.peek(), is(nullValue()));
    }
/**
 * {@code peek()} returns the head of the queue without removing it from the
 * queue or its backing cache; repeated peeks return the same head.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldPeekAtQueue(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    // wildcard type instead of the raw NamedMap the original used
    NamedMap<?, ?> cache = getCollectionCache(queue.getName());
    String sValue = "message-1";
    queue.offer(sValue);
    Object oKey = cache.keySet().iterator().next();
    assertThat(queue.peek(), is(sValue));
    // peek() must not remove anything from queue or cache
    assertThat(queue.isEmpty(), is(false));
    assertThat(cache.isEmpty(), is(false));
    assertThat(cache.get(oKey), is(sValue));
    assertThat(queue.peek(), is(sValue));
    }
// ----- size limited tests ---------------------------------------------
/**
 * Verify a queue with a configured maximum size rejects offers once full
 * and accepts them again after elements are polled.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldOfferAndPollSizeLimitedQueue(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    NamedMap<QueueKey, String> cache = getCollectionCache(queue.getName());
    QueueKey key = QueueKey.head(cache.getName());
    long cMax = 100000;
    int cBytes = 100;
    // rough count of cBytes-sized elements that fit in cMax units
    long cEntry = cMax / cBytes;
    // install the size limit on the queue's index via an entry processor,
    // then read it back to confirm the limit took effect
    assertThat(cache.invoke(key, entry -> AbstractQueueTests.setMaxQueueSize(entry, cMax)), is(true));
    assertThat(cache.invoke(key, AbstractQueueTests::getMaxQueueSize), is(cMax));
    String sPad = Randoms.getRandomString(cBytes, cBytes, true);
    // offer up to twice the estimated capacity; stop at the first rejection
    for (int i = 0; i < (cEntry * 2); i++)
        {
        String sElement = sPad + "-" + i;
        if (!queue.offer(sElement))
            {
            break;
            }
        }
    // should be <= max queue size
    Long cUnits = cache.invoke(key, AbstractQueueTests::getCacheUnits);
    assertThat(cUnits, is(notNullValue()));
    assertThat(cUnits, is(lessThanOrEqualTo(cMax)));
    // the queue should be full so an offer should fail
    assertThat(queue.offer(sPad + "-X"), is(false));
    // poll two then the next offer should succeed
    assertThat(queue.poll(), is(notNullValue()));
    assertThat(queue.poll(), is(notNullValue()));
    // the queue should NOT be full so an offer should succeed
    assertThat(queue.offer(sPad + "-X"), is(true));
    }
/**
 * Entry processor helper that sets the maximum queue size on the queue's
 * {@link QueueKeyExtractor.QueueIndex}.
 *
 * @param entry     the cache entry whose index map holds the queue index
 * @param cMaxSize  the maximum queue size to apply
 *
 * @return {@code true} once the limit has been applied
 */
public static Boolean setMaxQueueSize(InvocableMap.Entry<?, ?> entry, long cMaxSize)
    {
    MapIndex mapIndex = entry.asBinaryEntry().getIndexMap().get(QueueKeyExtractor.INSTANCE);
    assertThat(mapIndex, is(instanceOf(QueueKeyExtractor.QueueIndex.class)));
    ((QueueKeyExtractor.QueueIndex) mapIndex).setMaxQueueSize(cMaxSize);
    return true;
    }
/**
 * Entry processor helper that reads the maximum queue size from the queue's
 * {@link QueueKeyExtractor.QueueIndex}.
 *
 * @param entry  the cache entry whose index map holds the queue index
 *
 * @return the configured maximum queue size
 */
public static long getMaxQueueSize(InvocableMap.Entry<?, ?> entry)
    {
    MapIndex mapIndex = entry.asBinaryEntry().getIndexMap().get(QueueKeyExtractor.INSTANCE);
    assertThat(mapIndex, is(instanceOf(QueueKeyExtractor.QueueIndex.class)));
    return ((QueueKeyExtractor.QueueIndex) mapIndex).getMaxQueueSize();
    }
/**
 * Entry processor helper returning the total units consumed by the entry's
 * backing map, or {@code -1} when the backing map is not a
 * {@link ConfigurableCacheMap}.
 *
 * @param entry  the cache entry whose backing map is interrogated
 *
 * @return units multiplied by the unit factor, or {@code -1}
 */
@SuppressWarnings({"deprecation", "PatternVariableCanBeUsed"})
public static Long getCacheUnits(InvocableMap.Entry<?, ?> entry)
    {
    ObservableMap<?, ?> mapBacking = entry.asBinaryEntry().getBackingMapContext().getBackingMap();
    if (!(mapBacking instanceof ConfigurableCacheMap))
        {
        // not a size-limited backing map
        return -1L;
        }
    ConfigurableCacheMap mapCCM = (ConfigurableCacheMap) mapBacking;
    return (long) mapCCM.getUnits() * (long) mapCCM.getUnitFactor();
    }
// ----- other tests ----------------------------------------------------
/**
 * Alternating single offer and poll calls must round-trip each element.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldOfferAndPoll(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    for (int i = 0; i < 100; i++)
        {
        String sMsg = "message-" + i;
        assertThat(queue.offer(sMsg), is(true));
        // each offer is immediately followed by a poll of the same element
        assertThat(queue.poll(), is(sMsg));
        }
    }
/**
 * {@code poll()} must skip values whose TTL has lapsed, returning only the
 * non-expiring elements in their original order.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldNotPollExpiredValues(String sSerializer) throws Exception
    {
    long cTtlMillis = TimeUnit.SECONDS.toMillis(2);
    int cMsg = 50;
    QueueType queue = getNewCollection(sSerializer);
    // interleave a permanent message with one that will expire
    for (int i = 0; i < cMsg; i++)
        {
        assertThat(queue.offer("message-" + i), is(true));
        assertThat(queue.offer("message-" + i + "-expired", cTtlMillis), is(true));
        }
    // wait long enough for every TTL element to lapse
    Thread.sleep(cTtlMillis * 2);
    for (int i = 0; i < cMsg; i++)
        {
        assertThat(queue.poll(), is("message-" + i));
        }
    }
/**
 * {@code peek()} must skip values whose TTL has lapsed.
 * <p>
 * Fix: the original test never called {@code peek()} despite its name; a
 * peek assertion is added ahead of the existing poll assertion.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldNotPeekExpiredValues(String sSerializer) throws Exception
    {
    long nTTL = TimeUnit.SECONDS.toMillis(2);
    QueueType queue = getNewCollection(sSerializer);
    String sElement1 = "message-expired";
    assertThat(queue.offer(sElement1, nTTL), is(true));
    String sElement2 = "message";
    assertThat(queue.offer(sElement2), is(true));
    Thread.sleep(nTTL * 2);
    // the expired head must be skipped by peek as well as by poll
    assertThat(queue.peek(), is(sElement2));
    assertThat(queue.poll(), is(sElement2));
    }
/**
 * For each offered element, {@code peek()} observes the head without
 * removing it and {@code poll()} then removes that same head.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldOfferPeekAndPoll(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    for (int i = 0; i < 100; i++)
        {
        String sMsg = "message-" + i;
        assertThat(queue.offer(sMsg), is(true));
        // peek does not consume...
        assertThat(queue.peek(), is(sMsg));
        // ...poll does
        assertThat(queue.poll(), is(sMsg));
        }
    }
/**
 * Randomly interleaved batches of offers and polls must still observe FIFO
 * order overall: the full poll sequence equals the full offer sequence.
 * <p>
 * Fix: the two raw {@code List} locals are now properly parameterized.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldOfferAndPollRandomly(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    List<Object> listOffer = new ArrayList<>();
    List<Object> listPolled = new ArrayList<>();
    String sPrefix = "message-";
    int cOffer = 0;
    for (int i = 0; i < 10; i++)
        {
        // offer a random batch of at least one message
        int c = m_random.nextInt(10) + 1;
        for (int j = 0; j < c; j++)
            {
            String sValue = sPrefix + cOffer;
            assertThat(queue.offer(sValue), is(true));
            listOffer.add(sValue);
            cOffer++;
            }
        // poll fewer elements than the current size, so we should not poll a null
        c = m_random.nextInt(queue.size());
        for (int j = 0; j < c; j++)
            {
            listPolled.add(queue.poll());
            }
        }
    // poll whatever is left
    Object oValue = queue.poll();
    while (oValue != null)
        {
        listPolled.add(oValue);
        oValue = queue.poll();
        }
    assertThat(listPolled, is(listOffer));
    }
/**
 * A concurrent offering thread and polling thread, released together, must
 * agree on content and order once both complete.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldOfferAndPollFromMultipleThreads(String sSerializer) throws Exception
    {
    int cTotal = 10000;
    CountDownLatch latchStart = new CountDownLatch(1);
    QueueType queue = getNewCollection(sSerializer);

    OfferRunnable taskOffer = new OfferRunnable(cTotal, latchStart, queue, NamedQueue::offer);
    PollRunnable taskPoll = new PollRunnable(cTotal, latchStart, queue, NamedQueue::poll);

    CompletableFuture<Void> futOffer = CompletableFuture.runAsync(taskOffer);
    CompletableFuture<Void> futPoll = CompletableFuture.runAsync(taskPoll);

    // release both workers at (approximately) the same instant
    latchStart.countDown();
    futOffer.get(5, TimeUnit.MINUTES);
    futPoll.get(5, TimeUnit.MINUTES);

    // polled content and order must match what was offered
    assertThat(taskPoll.getPolled(), is(taskOffer.getOffered()));
    }
// ----- test iterator() method -----------------------------------------
/**
 * An iterator over an empty queue reports no elements.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldIterateEmptyQueue(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    // wildcard type instead of the raw Iterator the original used
    Iterator<?> iterator = queue.iterator();
    assertThat(iterator.hasNext(), is(false));
    }
/**
 * Iterating a queue spanning multiple iterator pages must yield every
 * element in offer order without consuming the queue.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldIterateQueueInOrder(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    // wildcard type instead of the raw NamedMap the original used
    NamedMap<?, ?> cache = getCollectionCache(queue.getName());
    String sPrefix = "message-";
    // more than two full pages, to exercise paging in the iterator
    int cMessage = (QueuePageIterator.DEFAULT_PAGE_SIZE * 2) + 5;
    List<String> expected = new ArrayList<>();
    for (long i = 0; i < cMessage; i++)
        {
        String sValue = sPrefix + i;
        queue.offer(sValue);
        expected.add(sValue);
        }
    Iterator<String> iterator = queue.iterator();
    assertThat(iterator.hasNext(), is(true));
    List<String> actual = new ArrayList<>();
    while (iterator.hasNext())
        {
        actual.add(iterator.next());
        }
    assertThat(actual, is(expected));
    // iteration must not consume the queue
    assertThat(queue.size(), is(cMessage));
    assertThat(cache.size(), is(cMessage));
    }
// ----- test iterator() method -----------------------------------------
/**
 * {@code toArray(T[])} on an empty queue returns an empty array.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldGetArrayFromEmptyQueue(String sSerializer)
    {
    NamedQueue<String> queue = getNewCollection(sSerializer);
    String[] aContents = queue.toArray(new String[0]);
    assertThat(aContents.length, is(0));
    }
/**
 * {@code toArray(T[])} returns the queue contents in offer order, even when
 * the queue spans multiple iterator pages.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldGetArrayFromQueueInOrder(String sSerializer)
    {
    NamedQueue<String> queue = getNewCollection(sSerializer);
    String sPrefix = "message-";
    // more than two full pages, to exercise paging
    int cMessage = (QueuePageIterator.DEFAULT_PAGE_SIZE * 2) + 5;
    String[] aExpected = new String[cMessage];
    for (int i = 0; i < cMessage; i++)
        {
        aExpected[i] = sPrefix + i;
        queue.offer(aExpected[i]);
        }
    assertThat(queue.toArray(new String[0]), is(aExpected));
    }
/**
 * {@code toArray()} returns the queue contents in offer order, even when
 * the queue spans multiple iterator pages.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldGetObjectArrayFromQueueInOrder(String sSerializer)
    {
    NamedQueue<String> queue = getNewCollection(sSerializer);
    String sPrefix = "message-";
    // more than two full pages, to exercise paging
    int cMessage = (QueuePageIterator.DEFAULT_PAGE_SIZE * 2) + 5;
    Object[] aExpected = new String[cMessage];
    for (int i = 0; i < cMessage; i++)
        {
        String sValue = sPrefix + i;
        queue.offer(sValue);
        aExpected[i] = sValue;
        }
    assertThat(queue.toArray(), is(aExpected));
    }
// ----- test clear() method --------------------------------------------
/**
 * {@code clear()} empties a non-empty queue.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldClearQueue(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    assertThat(queue.offer("message-1"), is(true));
    queue.clear();
    assertThat(queue.isEmpty(), is(true));
    assertThat(queue.size(), is(0));
    }
/**
 * {@code clear()} empties a queue holding many megabyte-sized elements.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldClearLargeQueue(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    int cbValue = 1024 * 1024;
    // each element is a random one-megabyte string
    String sBigValue = Randoms.getRandomString(cbValue, cbValue, true);
    for (int i = 0; i < 30; i++)
        {
        assertThat(queue.offer(sBigValue), is(true));
        }
    queue.clear();
    assertThat(queue.isEmpty(), is(true));
    assertThat(queue.size(), is(0));
    }
/**
 * After clearing a queue of large elements, the queue must remain fully
 * usable for further offers and polls in FIFO order.
 */
@ParameterizedTest(name = "{index} serializer={0}")
@MethodSource("serializers")
public void shouldOfferAndPollAfterClearingLargeQueue(String sSerializer)
    {
    QueueType queue = getNewCollection(sSerializer);
    int cbValue = 1024 * 1024;
    // each initial element is a random one-megabyte string
    String sBigValue = Randoms.getRandomString(cbValue, cbValue, true);
    int cElement = 33;
    for (int i = 0; i < cElement; i++)
        {
        assertThat(queue.offer(sBigValue), is(true));
        }
    queue.clear();
    assertThat(queue.isEmpty(), is(true));
    assertThat(queue.size(), is(0));
    // the queue must be fully usable again after the clear
    for (int i = 0; i < cElement; i++)
        {
        assertThat(queue.offer("message-" + i), is(true));
        }
    for (int i = 0; i < cElement; i++)
        {
        assertThat(queue.poll(), is("message-" + i));
        }
    }
// ----- helper methods -------------------------------------------------
/**
 * Entry processor helper returning the expiry of the entry's binary form.
 *
 * @param entry  the cache entry to interrogate
 *
 * @return the entry's expiry as reported by its {@code BinaryEntry}
 */
public static long getEntryExpiry(InvocableMap.Entry<Object, Object> entry)
    {
    return entry.asBinaryEntry().getExpiry();
    }
/**
 * Determine whether two {@link NamedMap} instances refer to the same
 * underlying map: same name, same service name, and the same
 * {@link ConfigurableCacheFactory}.
 *
 * @param mapThis   the first map to compare
 * @param mapOther  the second map to compare
 *
 * @return {@code true} if both references denote the same named map
 */
protected boolean isSameNamedMap(NamedMap<?, ?> mapThis, NamedMap<?, ?> mapOther)
    {
    // guard clauses replace the original's nested conditionals
    if (!mapThis.getName().equals(mapOther.getName()))
        {
        return false;
        }

    CacheService serviceThis  = mapThis.getService();
    CacheService serviceOther = mapOther.getService();
    if (!serviceThis.getInfo().getServiceName().equals(serviceOther.getInfo().getServiceName()))
        {
        return false;
        }

    ConfigurableCacheFactory ccfThis  = serviceThis.getBackingMapManager().getCacheFactory();
    ConfigurableCacheFactory ccfOther = serviceOther.getBackingMapManager().getCacheFactory();
    return ccfThis.equals(ccfOther);
    }
// ----- inner class: OfferRunnable -------------------------------------
/**
 * A {@link Runnable} that offers to a queue.
 * <p>
 * Offering starts only after the supplied latch is released, allowing the
 * test to start an offering and a polling worker at the same instant.
 */
@SuppressWarnings("ResultOfMethodCallIgnored")
protected class OfferRunnable
        implements Runnable
    {
    /**
     * Create an {@link OfferRunnable}.
     *
     * @param cElement the number of elements to offer
     * @param latch a {@link CountDownLatch} to use to trigger offering
     * @param queue the queue to offer to
     * @param offerFunction the function that offers the specified value to the specified queue
     */
    public OfferRunnable(int cElement,
                         CountDownLatch latch,
                         QueueType queue,
                         BiFunction<QueueType, Object, Boolean> offerFunction)
        {
        m_cElement = cElement;
        m_queue = queue;
        m_offerFunction = offerFunction;
        m_latch = latch;
        }

    @Override
    public void run()
        {
        // wait for the latch to trigger start
        try
            {
            m_latch.await(1, TimeUnit.MINUTES);
            }
        catch (InterruptedException e)
            {
            // NOTE(review): the thread's interrupt status is not restored
            // before rethrowing — acceptable for a test worker, but worth
            // confirming it is intentional
            throw Exceptions.ensureRuntimeException(e);
            }
        // offer sequentially numbered messages, recording each successful offer
        for (int i = 0; i < m_cElement; i++)
            {
            String sValue = "message-" + i;
            Boolean fResult = m_offerFunction.apply(m_queue, sValue);
            assertThat(fResult, is(true));
            m_listOffered.add(sValue);
            }
        }

    /**
     * Return the values offered to the queue, in offer order.
     *
     * @return the values offered to the queue
     */
    public List<Object> getOffered()
        {
        return m_listOffered;
        }

    // ----- data members -----------------------------------------------

    /**
     * The number of elements to offer.
     */
    private final int m_cElement;

    // the queue under test
    private final QueueType m_queue;

    // latch released by the test to start offering
    private final CountDownLatch m_latch;

    // function used to offer a value to the queue
    private final BiFunction<QueueType, Object, Boolean> m_offerFunction;

    // values successfully offered, in order
    private final List<Object> m_listOffered = new ArrayList<>();
    }
// ----- inner class: PollRunnable --------------------------------------

/**
 * A {@link Runnable} that polls from a queue.
 * <p>
 * Polling starts only after the supplied latch is released and keeps
 * polling until the expected number of non-null values has been collected.
 */
@SuppressWarnings("ResultOfMethodCallIgnored")
protected class PollRunnable
        implements Runnable
    {
    /**
     * Create an {@link PollRunnable}.
     *
     * @param cElement the number of elements to poll
     * @param latch a {@link CountDownLatch} to use to trigger polling
     * @param queue the queue to poll to
     * @param pollFunction the function that polls the specified value to the specified queue
     */
    public PollRunnable(int cElement,
                        CountDownLatch latch,
                        QueueType queue,
                        Function<QueueType, Object> pollFunction)
        {
        m_cElement = cElement;
        m_latch = latch;
        m_queue = queue;
        m_pollFunction = pollFunction;
        }

    @Override
    public void run()
        {
        // wait for the latch to trigger start
        try
            {
            m_latch.await(1, TimeUnit.MINUTES);
            }
        catch (InterruptedException e)
            {
            // NOTE(review): the thread's interrupt status is not restored
            // before rethrowing — acceptable for a test worker, but worth
            // confirming it is intentional
            throw Exceptions.ensureRuntimeException(e);
            }
        // busy-spin until the expected count has been polled; null results
        // (empty queue) are simply retried
        while (m_listPolled.size() < m_cElement)
            {
            Object oValue = m_pollFunction.apply(m_queue);
            if (oValue != null)
                {
                m_listPolled.add(oValue);
                }
            }
        }

    /**
     * Return the values polled from the queue, in poll order.
     *
     * @return the values polled from the queue
     */
    public List<Object> getPolled()
        {
        return m_listPolled;
        }

    // ----- data members -----------------------------------------------

    /**
     * The number of elements to poll.
     */
    private final int m_cElement;

    // the queue under test
    private final QueueType m_queue;

    // latch released by the test to start polling
    private final CountDownLatch m_latch;

    // function used to poll a value from the queue
    private final Function<QueueType, Object> m_pollFunction;

    // values polled, in order
    private final List<Object> m_listPolled = new ArrayList<>();
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/common/ad_type_infos.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.common;
/**
* <pre>
* Representation of video non-skippable in-stream ad format (15 second
* in-stream non-skippable video ad).
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo}
*/
public final class VideoNonSkippableInStreamAdInfo extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo)
VideoNonSkippableInStreamAdInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use VideoNonSkippableInStreamAdInfo.newBuilder() to construct.
private VideoNonSkippableInStreamAdInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private VideoNonSkippableInStreamAdInfo() {
actionButtonLabel_ = "";
actionHeadline_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new VideoNonSkippableInStreamAdInfo();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_VideoNonSkippableInStreamAdInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_VideoNonSkippableInStreamAdInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo.class, com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo.Builder.class);
}
private int bitField0_;
public static final int COMPANION_BANNER_FIELD_NUMBER = 5;
private com.google.ads.googleads.v19.common.AdImageAsset companionBanner_;
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 5;</code>
* @return Whether the companionBanner field is set.
*/
@java.lang.Override
public boolean hasCompanionBanner() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 5;</code>
* @return The companionBanner.
*/
@java.lang.Override
public com.google.ads.googleads.v19.common.AdImageAsset getCompanionBanner() {
return companionBanner_ == null ? com.google.ads.googleads.v19.common.AdImageAsset.getDefaultInstance() : companionBanner_;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 5;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v19.common.AdImageAssetOrBuilder getCompanionBannerOrBuilder() {
return companionBanner_ == null ? com.google.ads.googleads.v19.common.AdImageAsset.getDefaultInstance() : companionBanner_;
}
public static final int ACTION_BUTTON_LABEL_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object actionButtonLabel_ = "";
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 3;</code>
* @return The actionButtonLabel.
*/
@java.lang.Override
public java.lang.String getActionButtonLabel() {
java.lang.Object ref = actionButtonLabel_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
actionButtonLabel_ = s;
return s;
}
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 3;</code>
* @return The bytes for actionButtonLabel.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getActionButtonLabelBytes() {
java.lang.Object ref = actionButtonLabel_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
actionButtonLabel_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ACTION_HEADLINE_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object actionHeadline_ = "";
/**
* <pre>
* Additional text displayed with the "Call To Action" button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 4;</code>
* @return The actionHeadline.
*/
@java.lang.Override
public java.lang.String getActionHeadline() {
java.lang.Object ref = actionHeadline_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
actionHeadline_ = s;
return s;
}
}
/**
* <pre>
* Additional text displayed with the "Call To Action" button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 4;</code>
* @return The bytes for actionHeadline.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getActionHeadlineBytes() {
java.lang.Object ref = actionHeadline_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
actionHeadline_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionButtonLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, actionButtonLabel_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionHeadline_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, actionHeadline_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(5, getCompanionBanner());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionButtonLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, actionButtonLabel_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionHeadline_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, actionHeadline_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(5, getCompanionBanner());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo)) {
return super.equals(obj);
}
com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo other = (com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo) obj;
if (hasCompanionBanner() != other.hasCompanionBanner()) return false;
if (hasCompanionBanner()) {
if (!getCompanionBanner()
.equals(other.getCompanionBanner())) return false;
}
if (!getActionButtonLabel()
.equals(other.getActionButtonLabel())) return false;
if (!getActionHeadline()
.equals(other.getActionHeadline())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCompanionBanner()) {
hash = (37 * hash) + COMPANION_BANNER_FIELD_NUMBER;
hash = (53 * hash) + getCompanionBanner().hashCode();
}
hash = (37 * hash) + ACTION_BUTTON_LABEL_FIELD_NUMBER;
hash = (53 * hash) + getActionButtonLabel().hashCode();
hash = (37 * hash) + ACTION_HEADLINE_FIELD_NUMBER;
hash = (53 * hash) + getActionHeadline().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   * <pre>
   * Representation of video non-skippable in-stream ad format (15 second
   * in-stream non-skippable video ad).
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo}
   */
  // NOTE(review): protoc-generated builder ("DO NOT EDIT" file) — comments here
  // will be lost on regeneration. Field presence is tracked in bitField0_:
  //   0x1 companion_banner, 0x2 action_button_label, 0x4 action_headline.
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo)
      com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfoOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_VideoNonSkippableInStreamAdInfo_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_VideoNonSkippableInStreamAdInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo.class, com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo.Builder.class);
    }
    // Construct using com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates the message-field sub-builder when the runtime is
    // configured to always use field builders (alwaysUseFieldBuilders).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getCompanionBannerFieldBuilder();
      }
    }
    // Resets all fields to their defaults and clears presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      companionBanner_ = null;
      if (companionBannerBuilder_ != null) {
        companionBannerBuilder_.dispose();
        companionBannerBuilder_ = null;
      }
      actionButtonLabel_ = "";
      actionHeadline_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_VideoNonSkippableInStreamAdInfo_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo.getDefaultInstance();
    }
    // Builds the message, throwing if any required field is unset
    // (this message has none, so isInitialized() is always true).
    @java.lang.Override
    public com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo build() {
      com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo buildPartial() {
      com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo result = new com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bits are set into the result,
    // mirroring the builder's bitField0_ into the message's bitField0_.
    private void buildPartial0(com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.companionBanner_ = companionBannerBuilder_ == null
            ? companionBanner_
            : companionBannerBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.actionButtonLabel_ = actionButtonLabel_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.actionHeadline_ = actionHeadline_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the typed mergeFrom when the other message is the same
    // generated type; otherwise falls back to reflective merging.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo) {
        return mergeFrom((com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-by-field merge: set message fields only when present in `other`,
    // string fields only when non-empty (proto3 semantics).
    public Builder mergeFrom(com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo other) {
      if (other == com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo.getDefaultInstance()) return this;
      if (other.hasCompanionBanner()) {
        mergeCompanionBanner(other.getCompanionBanner());
      }
      if (!other.getActionButtonLabel().isEmpty()) {
        actionButtonLabel_ = other.actionButtonLabel_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getActionHeadline().isEmpty()) {
        actionHeadline_ = other.actionHeadline_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 26 = action_button_label (field 3, string),
    // tag 34 = action_headline (field 4, string), tag 42 = companion_banner
    // (field 5, message); unknown tags go to the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 26: {
              actionButtonLabel_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 26
            case 34: {
              actionHeadline_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 34
            case 42: {
              input.readMessage(
                  getCompanionBannerFieldBuilder().getBuilder(),
                  extensionRegistry);
              bitField0_ |= 0x00000001;
              break;
            } // case 42
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits for the three optional fields (see class comment).
    private int bitField0_;
    private com.google.ads.googleads.v19.common.AdImageAsset companionBanner_;
    // Lazily-created sub-builder; while non-null it owns the field value and
    // companionBanner_ is ignored (see getCompanionBannerFieldBuilder()).
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v19.common.AdImageAsset, com.google.ads.googleads.v19.common.AdImageAsset.Builder, com.google.ads.googleads.v19.common.AdImageAssetOrBuilder> companionBannerBuilder_;
    /**
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 5;</code>
     * @return Whether the companionBanner field is set.
     */
    public boolean hasCompanionBanner() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 5;</code>
     * @return The companionBanner.
     */
    public com.google.ads.googleads.v19.common.AdImageAsset getCompanionBanner() {
      if (companionBannerBuilder_ == null) {
        return companionBanner_ == null ? com.google.ads.googleads.v19.common.AdImageAsset.getDefaultInstance() : companionBanner_;
      } else {
        return companionBannerBuilder_.getMessage();
      }
    }
    /**
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 5;</code>
     */
    public Builder setCompanionBanner(com.google.ads.googleads.v19.common.AdImageAsset value) {
      if (companionBannerBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        companionBanner_ = value;
      } else {
        companionBannerBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 5;</code>
     */
    public Builder setCompanionBanner(
        com.google.ads.googleads.v19.common.AdImageAsset.Builder builderForValue) {
      if (companionBannerBuilder_ == null) {
        companionBanner_ = builderForValue.build();
      } else {
        companionBannerBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 5;</code>
     */
    public Builder mergeCompanionBanner(com.google.ads.googleads.v19.common.AdImageAsset value) {
      if (companionBannerBuilder_ == null) {
        // Merge into the existing value only if one is present and non-default;
        // otherwise replace wholesale.
        if (((bitField0_ & 0x00000001) != 0) &&
          companionBanner_ != null &&
          companionBanner_ != com.google.ads.googleads.v19.common.AdImageAsset.getDefaultInstance()) {
          getCompanionBannerBuilder().mergeFrom(value);
        } else {
          companionBanner_ = value;
        }
      } else {
        companionBannerBuilder_.mergeFrom(value);
      }
      if (companionBanner_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 5;</code>
     */
    public Builder clearCompanionBanner() {
      bitField0_ = (bitField0_ & ~0x00000001);
      companionBanner_ = null;
      if (companionBannerBuilder_ != null) {
        companionBannerBuilder_.dispose();
        companionBannerBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 5;</code>
     */
    public com.google.ads.googleads.v19.common.AdImageAsset.Builder getCompanionBannerBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getCompanionBannerFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 5;</code>
     */
    public com.google.ads.googleads.v19.common.AdImageAssetOrBuilder getCompanionBannerOrBuilder() {
      if (companionBannerBuilder_ != null) {
        return companionBannerBuilder_.getMessageOrBuilder();
      } else {
        return companionBanner_ == null ?
            com.google.ads.googleads.v19.common.AdImageAsset.getDefaultInstance() : companionBanner_;
      }
    }
    /**
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 5;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v19.common.AdImageAsset, com.google.ads.googleads.v19.common.AdImageAsset.Builder, com.google.ads.googleads.v19.common.AdImageAssetOrBuilder>
        getCompanionBannerFieldBuilder() {
      if (companionBannerBuilder_ == null) {
        companionBannerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.ads.googleads.v19.common.AdImageAsset, com.google.ads.googleads.v19.common.AdImageAsset.Builder, com.google.ads.googleads.v19.common.AdImageAssetOrBuilder>(
                getCompanionBanner(),
                getParentForChildren(),
                isClean());
        // Ownership moves to the field builder; the raw field is released.
        companionBanner_ = null;
      }
      return companionBannerBuilder_;
    }
    // Stored as String or ByteString; decoded lazily to String on first read.
    private java.lang.Object actionButtonLabel_ = "";
    /**
     * <pre>
     * Label on the "Call To Action" button taking the user to the video ad's
     * final URL.
     * </pre>
     *
     * <code>string action_button_label = 3;</code>
     * @return The actionButtonLabel.
     */
    public java.lang.String getActionButtonLabel() {
      java.lang.Object ref = actionButtonLabel_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        actionButtonLabel_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Label on the "Call To Action" button taking the user to the video ad's
     * final URL.
     * </pre>
     *
     * <code>string action_button_label = 3;</code>
     * @return The bytes for actionButtonLabel.
     */
    public com.google.protobuf.ByteString
        getActionButtonLabelBytes() {
      java.lang.Object ref = actionButtonLabel_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        actionButtonLabel_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Label on the "Call To Action" button taking the user to the video ad's
     * final URL.
     * </pre>
     *
     * <code>string action_button_label = 3;</code>
     * @param value The actionButtonLabel to set.
     * @return This builder for chaining.
     */
    public Builder setActionButtonLabel(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      actionButtonLabel_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Label on the "Call To Action" button taking the user to the video ad's
     * final URL.
     * </pre>
     *
     * <code>string action_button_label = 3;</code>
     * @return This builder for chaining.
     */
    public Builder clearActionButtonLabel() {
      actionButtonLabel_ = getDefaultInstance().getActionButtonLabel();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Label on the "Call To Action" button taking the user to the video ad's
     * final URL.
     * </pre>
     *
     * <code>string action_button_label = 3;</code>
     * @param value The bytes for actionButtonLabel to set.
     * @return This builder for chaining.
     */
    public Builder setActionButtonLabelBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      actionButtonLabel_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Stored as String or ByteString; decoded lazily to String on first read.
    private java.lang.Object actionHeadline_ = "";
    /**
     * <pre>
     * Additional text displayed with the "Call To Action" button to give
     * context and encourage clicking on the button.
     * </pre>
     *
     * <code>string action_headline = 4;</code>
     * @return The actionHeadline.
     */
    public java.lang.String getActionHeadline() {
      java.lang.Object ref = actionHeadline_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        actionHeadline_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Additional text displayed with the "Call To Action" button to give
     * context and encourage clicking on the button.
     * </pre>
     *
     * <code>string action_headline = 4;</code>
     * @return The bytes for actionHeadline.
     */
    public com.google.protobuf.ByteString
        getActionHeadlineBytes() {
      java.lang.Object ref = actionHeadline_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        actionHeadline_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Additional text displayed with the "Call To Action" button to give
     * context and encourage clicking on the button.
     * </pre>
     *
     * <code>string action_headline = 4;</code>
     * @param value The actionHeadline to set.
     * @return This builder for chaining.
     */
    public Builder setActionHeadline(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      actionHeadline_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Additional text displayed with the "Call To Action" button to give
     * context and encourage clicking on the button.
     * </pre>
     *
     * <code>string action_headline = 4;</code>
     * @return This builder for chaining.
     */
    public Builder clearActionHeadline() {
      actionHeadline_ = getDefaultInstance().getActionHeadline();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Additional text displayed with the "Call To Action" button to give
     * context and encourage clicking on the button.
     * </pre>
     *
     * <code>string action_headline = 4;</code>
     * @param value The bytes for actionHeadline to set.
     * @return This builder for chaining.
     */
    public Builder setActionHeadlineBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      actionHeadline_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo)
  // Shared immutable default instance; all-fields-default messages alias it.
  private static final com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo();
  }
  public static com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser delegates to Builder.mergeFrom and converts any IOException into
  // InvalidProtocolBufferException, attaching the partial message parsed so far.
  private static final com.google.protobuf.Parser<VideoNonSkippableInStreamAdInfo>
      PARSER = new com.google.protobuf.AbstractParser<VideoNonSkippableInStreamAdInfo>() {
    @java.lang.Override
    public VideoNonSkippableInStreamAdInfo parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<VideoNonSkippableInStreamAdInfo> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<VideoNonSkippableInStreamAdInfo> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v19.common.VideoNonSkippableInStreamAdInfo getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/common/ad_type_infos.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.common;
/**
* <pre>
* Representation of video non-skippable in-stream ad format (15 second
* in-stream non-skippable video ad).
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo}
*/
public final class VideoNonSkippableInStreamAdInfo extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo)
VideoNonSkippableInStreamAdInfoOrBuilder {
private static final long serialVersionUID = 0L;
  // Use VideoNonSkippableInStreamAdInfo.newBuilder() to construct.
  private VideoNonSkippableInStreamAdInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private VideoNonSkippableInStreamAdInfo() {
    actionButtonLabel_ = "";
    actionHeadline_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new VideoNonSkippableInStreamAdInfo();
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_VideoNonSkippableInStreamAdInfo_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_VideoNonSkippableInStreamAdInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo.class, com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo.Builder.class);
  }
  // Presence bits: 0x1 = companion_banner (the two strings use proto3
  // non-empty semantics on the wire).
  private int bitField0_;
  public static final int COMPANION_BANNER_FIELD_NUMBER = 5;
  private com.google.ads.googleads.v20.common.AdImageAsset companionBanner_;
  /**
   * <pre>
   * The image assets of the companion banner used with the ad.
   * </pre>
   *
   * <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 5;</code>
   * @return Whether the companionBanner field is set.
   */
  @java.lang.Override
  public boolean hasCompanionBanner() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * The image assets of the companion banner used with the ad.
   * </pre>
   *
   * <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 5;</code>
   * @return The companionBanner.
   */
  @java.lang.Override
  public com.google.ads.googleads.v20.common.AdImageAsset getCompanionBanner() {
    return companionBanner_ == null ? com.google.ads.googleads.v20.common.AdImageAsset.getDefaultInstance() : companionBanner_;
  }
  /**
   * <pre>
   * The image assets of the companion banner used with the ad.
   * </pre>
   *
   * <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 5;</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v20.common.AdImageAssetOrBuilder getCompanionBannerOrBuilder() {
    return companionBanner_ == null ? com.google.ads.googleads.v20.common.AdImageAsset.getDefaultInstance() : companionBanner_;
  }
  public static final int ACTION_BUTTON_LABEL_FIELD_NUMBER = 3;
  // Holds String or ByteString; decoded lazily and cached on first access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object actionButtonLabel_ = "";
  /**
   * <pre>
   * Label on the "Call To Action" button taking the user to the video ad's
   * final URL.
   * </pre>
   *
   * <code>string action_button_label = 3;</code>
   * @return The actionButtonLabel.
   */
  @java.lang.Override
  public java.lang.String getActionButtonLabel() {
    java.lang.Object ref = actionButtonLabel_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      actionButtonLabel_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Label on the "Call To Action" button taking the user to the video ad's
   * final URL.
   * </pre>
   *
   * <code>string action_button_label = 3;</code>
   * @return The bytes for actionButtonLabel.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getActionButtonLabelBytes() {
    java.lang.Object ref = actionButtonLabel_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      actionButtonLabel_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ACTION_HEADLINE_FIELD_NUMBER = 4;
  // Holds String or ByteString; decoded lazily and cached on first access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object actionHeadline_ = "";
  /**
   * <pre>
   * Additional text displayed with the "Call To Action" button to give
   * context and encourage clicking on the button.
   * </pre>
   *
   * <code>string action_headline = 4;</code>
   * @return The actionHeadline.
   */
  @java.lang.Override
  public java.lang.String getActionHeadline() {
    java.lang.Object ref = actionHeadline_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      actionHeadline_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Additional text displayed with the "Call To Action" button to give
   * context and encourage clicking on the button.
   * </pre>
   *
   * <code>string action_headline = 4;</code>
   * @return The bytes for actionHeadline.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getActionHeadlineBytes() {
    java.lang.Object ref = actionHeadline_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      actionHeadline_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in ascending field-number order (3, 4, 5),
  // followed by any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionButtonLabel_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, actionButtonLabel_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionHeadline_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, actionHeadline_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(5, getCompanionBanner());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionButtonLabel_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, actionButtonLabel_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionHeadline_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, actionHeadline_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(5, getCompanionBanner());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo other = (com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo) obj;
    if (hasCompanionBanner() != other.hasCompanionBanner()) return false;
    if (hasCompanionBanner()) {
      if (!getCompanionBanner()
          .equals(other.getCompanionBanner())) return false;
    }
    if (!getActionButtonLabel()
        .equals(other.getActionButtonLabel())) return false;
    if (!getActionHeadline()
        .equals(other.getActionHeadline())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasCompanionBanner()) {
      hash = (37 * hash) + COMPANION_BANNER_FIELD_NUMBER;
      hash = (53 * hash) + getCompanionBanner().hashCode();
    }
    hash = (37 * hash) + ACTION_BUTTON_LABEL_FIELD_NUMBER;
    hash = (53 * hash) + getActionButtonLabel().hashCode();
    hash = (37 * hash) + ACTION_HEADLINE_FIELD_NUMBER;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points for every supported input representation;
  // all delegate to PARSER (see class tail).
  public static com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  // Creates a fresh Builder seeded from the shared default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a Builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid an unnecessary mergeFrom when this IS the default instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Representation of video non-skippable in-stream ad format (15 second
* in-stream non-skippable video ad).
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo)
com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfoOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_VideoNonSkippableInStreamAdInfo_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_VideoNonSkippableInStreamAdInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo.class, com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo.Builder.class);
    }
    // Construct using com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates the message-field sub-builder when the runtime is
    // configured to always use field builders (alwaysUseFieldBuilders).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getCompanionBannerFieldBuilder();
      }
    }
    // Resets all fields to their defaults and clears presence bits
    // (0x1 companion_banner, 0x2 action_button_label, 0x4 action_headline).
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      companionBanner_ = null;
      if (companionBannerBuilder_ != null) {
        companionBannerBuilder_.dispose();
        companionBannerBuilder_ = null;
      }
      actionButtonLabel_ = "";
      actionHeadline_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_VideoNonSkippableInStreamAdInfo_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo getDefaultInstanceForType() {
      return com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo.getDefaultInstance();
    }
    // Builds the message, throwing if any required field is unset.
    @java.lang.Override
    public com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo build() {
      com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo buildPartial() {
      com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo result = new com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bits are set into the result,
    // mirroring the builder's bitField0_ into the message's bitField0_.
    private void buildPartial0(com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.companionBanner_ = companionBannerBuilder_ == null
            ? companionBanner_
            : companionBannerBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.actionButtonLabel_ = actionButtonLabel_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.actionHeadline_ = actionHeadline_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo) {
return mergeFrom((com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo other) {
if (other == com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo.getDefaultInstance()) return this;
if (other.hasCompanionBanner()) {
mergeCompanionBanner(other.getCompanionBanner());
}
if (!other.getActionButtonLabel().isEmpty()) {
actionButtonLabel_ = other.actionButtonLabel_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getActionHeadline().isEmpty()) {
actionHeadline_ = other.actionHeadline_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 26: {
actionButtonLabel_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 26
case 34: {
actionHeadline_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 34
case 42: {
input.readMessage(
getCompanionBannerFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 42
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.ads.googleads.v20.common.AdImageAsset companionBanner_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.common.AdImageAsset, com.google.ads.googleads.v20.common.AdImageAsset.Builder, com.google.ads.googleads.v20.common.AdImageAssetOrBuilder> companionBannerBuilder_;
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 5;</code>
* @return Whether the companionBanner field is set.
*/
public boolean hasCompanionBanner() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 5;</code>
* @return The companionBanner.
*/
public com.google.ads.googleads.v20.common.AdImageAsset getCompanionBanner() {
if (companionBannerBuilder_ == null) {
return companionBanner_ == null ? com.google.ads.googleads.v20.common.AdImageAsset.getDefaultInstance() : companionBanner_;
} else {
return companionBannerBuilder_.getMessage();
}
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 5;</code>
*/
public Builder setCompanionBanner(com.google.ads.googleads.v20.common.AdImageAsset value) {
if (companionBannerBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
companionBanner_ = value;
} else {
companionBannerBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 5;</code>
*/
public Builder setCompanionBanner(
com.google.ads.googleads.v20.common.AdImageAsset.Builder builderForValue) {
if (companionBannerBuilder_ == null) {
companionBanner_ = builderForValue.build();
} else {
companionBannerBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 5;</code>
*/
public Builder mergeCompanionBanner(com.google.ads.googleads.v20.common.AdImageAsset value) {
if (companionBannerBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
companionBanner_ != null &&
companionBanner_ != com.google.ads.googleads.v20.common.AdImageAsset.getDefaultInstance()) {
getCompanionBannerBuilder().mergeFrom(value);
} else {
companionBanner_ = value;
}
} else {
companionBannerBuilder_.mergeFrom(value);
}
if (companionBanner_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 5;</code>
*/
public Builder clearCompanionBanner() {
bitField0_ = (bitField0_ & ~0x00000001);
companionBanner_ = null;
if (companionBannerBuilder_ != null) {
companionBannerBuilder_.dispose();
companionBannerBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 5;</code>
*/
public com.google.ads.googleads.v20.common.AdImageAsset.Builder getCompanionBannerBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getCompanionBannerFieldBuilder().getBuilder();
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 5;</code>
*/
public com.google.ads.googleads.v20.common.AdImageAssetOrBuilder getCompanionBannerOrBuilder() {
if (companionBannerBuilder_ != null) {
return companionBannerBuilder_.getMessageOrBuilder();
} else {
return companionBanner_ == null ?
com.google.ads.googleads.v20.common.AdImageAsset.getDefaultInstance() : companionBanner_;
}
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 5;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.common.AdImageAsset, com.google.ads.googleads.v20.common.AdImageAsset.Builder, com.google.ads.googleads.v20.common.AdImageAssetOrBuilder>
getCompanionBannerFieldBuilder() {
if (companionBannerBuilder_ == null) {
companionBannerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.common.AdImageAsset, com.google.ads.googleads.v20.common.AdImageAsset.Builder, com.google.ads.googleads.v20.common.AdImageAssetOrBuilder>(
getCompanionBanner(),
getParentForChildren(),
isClean());
companionBanner_ = null;
}
return companionBannerBuilder_;
}
private java.lang.Object actionButtonLabel_ = "";
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 3;</code>
* @return The actionButtonLabel.
*/
public java.lang.String getActionButtonLabel() {
java.lang.Object ref = actionButtonLabel_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
actionButtonLabel_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 3;</code>
* @return The bytes for actionButtonLabel.
*/
public com.google.protobuf.ByteString
getActionButtonLabelBytes() {
java.lang.Object ref = actionButtonLabel_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
actionButtonLabel_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 3;</code>
* @param value The actionButtonLabel to set.
* @return This builder for chaining.
*/
public Builder setActionButtonLabel(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
actionButtonLabel_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 3;</code>
* @return This builder for chaining.
*/
public Builder clearActionButtonLabel() {
actionButtonLabel_ = getDefaultInstance().getActionButtonLabel();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 3;</code>
* @param value The bytes for actionButtonLabel to set.
* @return This builder for chaining.
*/
public Builder setActionButtonLabelBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
actionButtonLabel_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object actionHeadline_ = "";
/**
* <pre>
* Additional text displayed with the "Call To Action" button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 4;</code>
* @return The actionHeadline.
*/
public java.lang.String getActionHeadline() {
java.lang.Object ref = actionHeadline_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
actionHeadline_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Additional text displayed with the "Call To Action" button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 4;</code>
* @return The bytes for actionHeadline.
*/
public com.google.protobuf.ByteString
getActionHeadlineBytes() {
java.lang.Object ref = actionHeadline_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
actionHeadline_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Additional text displayed with the "Call To Action" button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 4;</code>
* @param value The actionHeadline to set.
* @return This builder for chaining.
*/
public Builder setActionHeadline(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
actionHeadline_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Additional text displayed with the "Call To Action" button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 4;</code>
* @return This builder for chaining.
*/
public Builder clearActionHeadline() {
actionHeadline_ = getDefaultInstance().getActionHeadline();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* <pre>
* Additional text displayed with the "Call To Action" button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 4;</code>
* @param value The bytes for actionHeadline to set.
* @return This builder for chaining.
*/
public Builder setActionHeadlineBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
actionHeadline_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo)
// Lazily-published singleton default instance; all absent-message reads funnel here.
private static final com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo();
}
public static com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser delegates to Builder.mergeFrom; on failure it attaches the partially
// parsed message via setUnfinishedMessage so callers can inspect what was read.
private static final com.google.protobuf.Parser<VideoNonSkippableInStreamAdInfo>
PARSER = new com.google.protobuf.AbstractParser<VideoNonSkippableInStreamAdInfo>() {
@java.lang.Override
public VideoNonSkippableInStreamAdInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Any other I/O failure is wrapped as a protocol error, still carrying the partial.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<VideoNonSkippableInStreamAdInfo> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<VideoNonSkippableInStreamAdInfo> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v20.common.VideoNonSkippableInStreamAdInfo getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== end of generated file: v20 VideoNonSkippableInStreamAdInfo.java ====
// googleads/google-ads-java | 36226 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/common/VideoNonSkippableInStreamAdInfo.java
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/common/ad_type_infos.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.common;
/**
* <pre>
* Representation of video non-skippable in-stream ad format (15 second
* in-stream non-skippable video ad).
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo}
*/
public final class VideoNonSkippableInStreamAdInfo extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo)
VideoNonSkippableInStreamAdInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use VideoNonSkippableInStreamAdInfo.newBuilder() to construct.
private VideoNonSkippableInStreamAdInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg ctor used for DEFAULT_INSTANCE; string fields start as "" (proto3 default).
private VideoNonSkippableInStreamAdInfo() {
actionButtonLabel_ = "";
actionHeadline_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new VideoNonSkippableInStreamAdInfo();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_VideoNonSkippableInStreamAdInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_VideoNonSkippableInStreamAdInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo.class, com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo.Builder.class);
}
// Presence bitmask; bit 0x00000001 tracks whether companion_banner is set.
private int bitField0_;
public static final int COMPANION_BANNER_FIELD_NUMBER = 5;
private com.google.ads.googleads.v21.common.AdImageAsset companionBanner_;
/**
 * <pre>
 * The image assets of the companion banner used with the ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 5;</code>
 * @return Whether the companionBanner field is set.
 */
@java.lang.Override
public boolean hasCompanionBanner() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * <pre>
 * The image assets of the companion banner used with the ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 5;</code>
 * @return The companionBanner.
 */
@java.lang.Override
public com.google.ads.googleads.v21.common.AdImageAsset getCompanionBanner() {
// Absent message fields are surfaced as the default instance, never null.
return companionBanner_ == null ? com.google.ads.googleads.v21.common.AdImageAsset.getDefaultInstance() : companionBanner_;
}
/**
 * <pre>
 * The image assets of the companion banner used with the ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 5;</code>
 */
@java.lang.Override
public com.google.ads.googleads.v21.common.AdImageAssetOrBuilder getCompanionBannerOrBuilder() {
return companionBanner_ == null ? com.google.ads.googleads.v21.common.AdImageAsset.getDefaultInstance() : companionBanner_;
}
public static final int ACTION_BUTTON_LABEL_FIELD_NUMBER = 3;
// Holds either a java.lang.String or a ByteString; the getters convert lazily
// and cache the converted form back into the field (standard protoc pattern).
@SuppressWarnings("serial")
private volatile java.lang.Object actionButtonLabel_ = "";
/**
 * <pre>
 * Label on the "Call To Action" button taking the user to the video ad's
 * final URL.
 * </pre>
 *
 * <code>string action_button_label = 3;</code>
 * @return The actionButtonLabel.
 */
@java.lang.Override
public java.lang.String getActionButtonLabel() {
java.lang.Object ref = actionButtonLabel_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip UTF-8 decoding.
actionButtonLabel_ = s;
return s;
}
}
/**
 * <pre>
 * Label on the "Call To Action" button taking the user to the video ad's
 * final URL.
 * </pre>
 *
 * <code>string action_button_label = 3;</code>
 * @return The bytes for actionButtonLabel.
 */
@java.lang.Override
public com.google.protobuf.ByteString
getActionButtonLabelBytes() {
java.lang.Object ref = actionButtonLabel_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for later serialization.
actionButtonLabel_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ACTION_HEADLINE_FIELD_NUMBER = 4;
// Same lazy String/ByteString dual-representation caching as actionButtonLabel_.
@SuppressWarnings("serial")
private volatile java.lang.Object actionHeadline_ = "";
/**
 * <pre>
 * Additional text displayed with the "Call To Action" button to give
 * context and encourage clicking on the button.
 * </pre>
 *
 * <code>string action_headline = 4;</code>
 * @return The actionHeadline.
 */
@java.lang.Override
public java.lang.String getActionHeadline() {
java.lang.Object ref = actionHeadline_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
actionHeadline_ = s;
return s;
}
}
/**
 * <pre>
 * Additional text displayed with the "Call To Action" button to give
 * context and encourage clicking on the button.
 * </pre>
 *
 * <code>string action_headline = 4;</code>
 * @return The bytes for actionHeadline.
 */
@java.lang.Override
public com.google.protobuf.ByteString
getActionHeadlineBytes() {
java.lang.Object ref = actionHeadline_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
actionHeadline_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoization sentinel: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// This message has no required fields, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
// Serializes set fields in ascending field-number order (3, 4, 5), then any
// unknown fields retained from parsing. Empty strings are skipped (proto3).
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionButtonLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, actionButtonLabel_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionHeadline_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, actionHeadline_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(5, getCompanionBanner());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
// Computes (and memoizes in memoizedSize; -1 = not yet computed) the exact
// wire size, mirroring the field checks in writeTo.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionButtonLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, actionButtonLabel_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionHeadline_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, actionHeadline_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(5, getCompanionBanner());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
// Structural equality: compares presence + value of companion_banner, both
// string fields, and the retained unknown fields.
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo other = (com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo) obj;
if (hasCompanionBanner() != other.hasCompanionBanner()) return false;
if (hasCompanionBanner()) {
if (!getCompanionBanner()
.equals(other.getCompanionBanner())) return false;
}
if (!getActionButtonLabel()
.equals(other.getActionButtonLabel())) return false;
if (!getActionHeadline()
.equals(other.getActionHeadline())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
// Memoized hash (0 = not yet computed) mixing the descriptor, each set field's
// number and value, and unknown fields — consistent with equals above.
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCompanionBanner()) {
hash = (37 * hash) + COMPANION_BANNER_FIELD_NUMBER;
hash = (53 * hash) + getCompanionBanner().hashCode();
}
hash = (37 * hash) + ACTION_BUTTON_LABEL_FIELD_NUMBER;
hash = (53 * hash) + getActionButtonLabel().hashCode();
hash = (37 * hash) + ACTION_HEADLINE_FIELD_NUMBER;
hash = (53 * hash) + getActionHeadline().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard protoc parse entry points: each overload delegates to the shared
// PARSER (optionally with an extension registry); stream variants route
// through GeneratedMessageV3 helpers that translate parse failures into
// IOException/InvalidProtocolBufferException as appropriate.
public static com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload.
public static com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Seeds a new builder with the given prototype's field values.
public static Builder newBuilder(com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// For the default instance an empty builder suffices; otherwise copy fields in.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Representation of video non-skippable in-stream ad format (15 second
* in-stream non-skippable video ad).
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo)
com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_VideoNonSkippableInStreamAdInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_VideoNonSkippableInStreamAdInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo.class, com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo.Builder.class);
}
// Construct using com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Eagerly create nested-message field builders when the runtime requires it.
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getCompanionBannerFieldBuilder();
}
}
@java.lang.Override
// Resets every field to its default and clears the presence bitmask.
public Builder clear() {
super.clear();
bitField0_ = 0;
companionBanner_ = null;
if (companionBannerBuilder_ != null) {
companionBannerBuilder_.dispose();
companionBannerBuilder_ = null;
}
actionButtonLabel_ = "";
actionHeadline_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_VideoNonSkippableInStreamAdInfo_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo getDefaultInstanceForType() {
return com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo.getDefaultInstance();
}
@java.lang.Override
// build() enforces initialization; buildPartial() does not.
public com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo build() {
com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo buildPartial() {
com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo result = new com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Copies only the fields whose presence bits are set into the result message.
private void buildPartial0(com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.companionBanner_ = companionBannerBuilder_ == null
? companionBanner_
: companionBannerBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.actionButtonLabel_ = actionButtonLabel_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.actionHeadline_ = actionHeadline_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo) {
return mergeFrom((com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo other) {
if (other == com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo.getDefaultInstance()) return this;
if (other.hasCompanionBanner()) {
mergeCompanionBanner(other.getCompanionBanner());
}
if (!other.getActionButtonLabel().isEmpty()) {
actionButtonLabel_ = other.actionButtonLabel_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getActionHeadline().isEmpty()) {
actionHeadline_ = other.actionHeadline_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 26: {
actionButtonLabel_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 26
case 34: {
actionHeadline_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 34
case 42: {
input.readMessage(
getCompanionBannerFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 42
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.ads.googleads.v21.common.AdImageAsset companionBanner_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.common.AdImageAsset, com.google.ads.googleads.v21.common.AdImageAsset.Builder, com.google.ads.googleads.v21.common.AdImageAssetOrBuilder> companionBannerBuilder_;
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 5;</code>
* @return Whether the companionBanner field is set.
*/
public boolean hasCompanionBanner() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 5;</code>
* @return The companionBanner.
*/
public com.google.ads.googleads.v21.common.AdImageAsset getCompanionBanner() {
if (companionBannerBuilder_ == null) {
return companionBanner_ == null ? com.google.ads.googleads.v21.common.AdImageAsset.getDefaultInstance() : companionBanner_;
} else {
return companionBannerBuilder_.getMessage();
}
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 5;</code>
*/
public Builder setCompanionBanner(com.google.ads.googleads.v21.common.AdImageAsset value) {
if (companionBannerBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
companionBanner_ = value;
} else {
companionBannerBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 5;</code>
*/
public Builder setCompanionBanner(
com.google.ads.googleads.v21.common.AdImageAsset.Builder builderForValue) {
if (companionBannerBuilder_ == null) {
companionBanner_ = builderForValue.build();
} else {
companionBannerBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 5;</code>
*/
public Builder mergeCompanionBanner(com.google.ads.googleads.v21.common.AdImageAsset value) {
if (companionBannerBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
companionBanner_ != null &&
companionBanner_ != com.google.ads.googleads.v21.common.AdImageAsset.getDefaultInstance()) {
getCompanionBannerBuilder().mergeFrom(value);
} else {
companionBanner_ = value;
}
} else {
companionBannerBuilder_.mergeFrom(value);
}
if (companionBanner_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 5;</code>
*/
public Builder clearCompanionBanner() {
bitField0_ = (bitField0_ & ~0x00000001);
companionBanner_ = null;
if (companionBannerBuilder_ != null) {
companionBannerBuilder_.dispose();
companionBannerBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 5;</code>
*/
public com.google.ads.googleads.v21.common.AdImageAsset.Builder getCompanionBannerBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getCompanionBannerFieldBuilder().getBuilder();
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 5;</code>
*/
public com.google.ads.googleads.v21.common.AdImageAssetOrBuilder getCompanionBannerOrBuilder() {
if (companionBannerBuilder_ != null) {
return companionBannerBuilder_.getMessageOrBuilder();
} else {
return companionBanner_ == null ?
com.google.ads.googleads.v21.common.AdImageAsset.getDefaultInstance() : companionBanner_;
}
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 5;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.common.AdImageAsset, com.google.ads.googleads.v21.common.AdImageAsset.Builder, com.google.ads.googleads.v21.common.AdImageAssetOrBuilder>
getCompanionBannerFieldBuilder() {
if (companionBannerBuilder_ == null) {
companionBannerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.common.AdImageAsset, com.google.ads.googleads.v21.common.AdImageAsset.Builder, com.google.ads.googleads.v21.common.AdImageAssetOrBuilder>(
getCompanionBanner(),
getParentForChildren(),
isClean());
companionBanner_ = null;
}
return companionBannerBuilder_;
}
private java.lang.Object actionButtonLabel_ = "";
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 3;</code>
* @return The actionButtonLabel.
*/
public java.lang.String getActionButtonLabel() {
java.lang.Object ref = actionButtonLabel_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
actionButtonLabel_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 3;</code>
* @return The bytes for actionButtonLabel.
*/
public com.google.protobuf.ByteString
getActionButtonLabelBytes() {
java.lang.Object ref = actionButtonLabel_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
actionButtonLabel_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 3;</code>
* @param value The actionButtonLabel to set.
* @return This builder for chaining.
*/
public Builder setActionButtonLabel(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
actionButtonLabel_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 3;</code>
* @return This builder for chaining.
*/
public Builder clearActionButtonLabel() {
actionButtonLabel_ = getDefaultInstance().getActionButtonLabel();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 3;</code>
* @param value The bytes for actionButtonLabel to set.
* @return This builder for chaining.
*/
public Builder setActionButtonLabelBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
actionButtonLabel_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object actionHeadline_ = "";
/**
* <pre>
* Additional text displayed with the "Call To Action" button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 4;</code>
* @return The actionHeadline.
*/
public java.lang.String getActionHeadline() {
java.lang.Object ref = actionHeadline_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
actionHeadline_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Additional text displayed with the "Call To Action" button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 4;</code>
* @return The bytes for actionHeadline.
*/
public com.google.protobuf.ByteString
getActionHeadlineBytes() {
java.lang.Object ref = actionHeadline_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
actionHeadline_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Additional text displayed with the "Call To Action" button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 4;</code>
* @param value The actionHeadline to set.
* @return This builder for chaining.
*/
public Builder setActionHeadline(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
actionHeadline_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Additional text displayed with the "Call To Action" button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 4;</code>
* @return This builder for chaining.
*/
public Builder clearActionHeadline() {
actionHeadline_ = getDefaultInstance().getActionHeadline();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* <pre>
* Additional text displayed with the "Call To Action" button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 4;</code>
* @param value The bytes for actionHeadline to set.
* @return This builder for chaining.
*/
public Builder setActionHeadlineBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
actionHeadline_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo)
private static final com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo();
}
public static com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<VideoNonSkippableInStreamAdInfo>
PARSER = new com.google.protobuf.AbstractParser<VideoNonSkippableInStreamAdInfo>() {
@java.lang.Override
public VideoNonSkippableInStreamAdInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<VideoNonSkippableInStreamAdInfo> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<VideoNonSkippableInStreamAdInfo> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v21.common.VideoNonSkippableInStreamAdInfo getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ---- end of generated protobuf file (dataset concatenation boundary) ----
// apache/geode | 35954 | geode-core/src/main/java/org/apache/geode/internal/cache/AbstractDiskRegion.java
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
import static java.lang.System.lineSeparator;
import java.io.PrintStream;
import java.util.EnumSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import joptsimple.internal.Strings;
import org.apache.logging.log4j.Logger;
import org.apache.geode.StatisticsFactory;
import org.apache.geode.cache.EvictionAction;
import org.apache.geode.cache.EvictionAlgorithm;
import org.apache.geode.cache.EvictionAttributes;
import org.apache.geode.compression.Compressor;
import org.apache.geode.internal.CopyOnWriteHashSet;
import org.apache.geode.internal.cache.DiskInitFile.DiskRegionFlag;
import org.apache.geode.internal.cache.entries.OffHeapRegionEntry;
import org.apache.geode.internal.cache.persistence.DiskRegionView;
import org.apache.geode.internal.cache.persistence.PersistentMemberID;
import org.apache.geode.internal.cache.persistence.PersistentMemberPattern;
import org.apache.geode.internal.cache.versions.DiskRegionVersionVector;
import org.apache.geode.internal.cache.versions.RegionVersionHolder;
import org.apache.geode.internal.cache.versions.RegionVersionVector;
import org.apache.geode.internal.cache.versions.VersionSource;
import org.apache.geode.internal.cache.versions.VersionTag;
import org.apache.geode.internal.classloader.ClassPathLoader;
import org.apache.geode.internal.logging.log4j.LogMarker;
import org.apache.geode.internal.util.concurrent.ConcurrentMapWithReusableEntries;
import org.apache.geode.logging.internal.log4j.api.LogService;
/**
* Code shared by both DiskRegion and RecoveredDiskRegion.
*
* @since GemFire prPersistSprint2
*/
public abstract class AbstractDiskRegion implements DiskRegionView {
private static final Logger logger = LogService.getLogger();
private final DiskStoreImpl ds;
private final long id;
private long clearOplogEntryId = DiskStoreImpl.INVALID_ID;
private RegionVersionVector clearRVV;
private byte lruAlgorithm;
private byte lruAction;
private int lruLimit;
private int concurrencyLevel = 16;
private int initialCapacity = 16;
private float loadFactor = 0.75f;
private boolean statisticsEnabled;
private boolean isBucket;
/** True if a persistent backup is needed */
private boolean backup;
/** Additional flags that are persisted to the meta-data. */
private final EnumSet<DiskRegionFlag> flags;
/**
* A flag used to indicate that this disk region is being recreated using already existing data on
* the disk.
*/
private boolean isRecreated;
private boolean configChanged;
private boolean aboutToDestroy;
private boolean aboutToDestroyDataStorage;
private String partitionName;
private int startingBucketId;
private String compressorClassName;
private Compressor compressor;
private boolean offHeap;
/**
* Records the version vector of what has been persisted to disk. This may lag behind the version
* vector of what is in memory, because updates may be written asynchronously to disk. We need to
* keep track of exactly what has been written to disk so that we can record a version vector at
* the beginning of each oplog.
*
* The version vector of what is in memory is held in is held in LocalRegion.versionVector.
*/
private RegionVersionVector versionVector;
/**
* A flag whether the current version vector accurately represents what has been written to this
* members disk.
*/
private volatile boolean rvvTrusted = true;
  /**
   * Creates a disk region for the given store and region name.
   *
   * <p>If the disk init file already knows a region by this name (a recreate over existing disk
   * data), that view is removed from the init file and its persisted configuration, membership
   * sets, stat counters and recovered-entry state are carried over into this instance, which will
   * replace it in the disk store's region map. Otherwise a brand new region id and empty state
   * are created.
   *
   * @param ds the disk store this region belongs to
   * @param name the name used to look the region up in the disk init file
   */
  protected AbstractDiskRegion(DiskStoreImpl ds, String name) {
    DiskRegionView drv = ds.getDiskInitFile().takeDiskRegionByName(name);
    if (drv != null) {
      // if we found one in the initFile then we take it out of it and this
      // one we are constructing will replace it in the diskStore drMap.
      this.ds = drv.getDiskStore();
      id = drv.getId();
      backup = drv.isBackup();
      clearOplogEntryId = drv.getClearOplogEntryId();
      clearRVV = drv.getClearRVV();
      lruAlgorithm = drv.getLruAlgorithm();
      lruAction = drv.getLruAction();
      lruLimit = drv.getLruLimit();
      concurrencyLevel = drv.getConcurrencyLevel();
      initialCapacity = drv.getInitialCapacity();
      loadFactor = drv.getLoadFactor();
      statisticsEnabled = drv.getStatisticsEnabled();
      isBucket = drv.isBucket();
      flags = drv.getFlags();
      partitionName = drv.getPartitionName();
      startingBucketId = drv.getStartingBucketId();
      myInitializingId = drv.getMyInitializingID();
      myInitializedId = drv.getMyPersistentID();
      aboutToDestroy = drv.wasAboutToDestroy();
      aboutToDestroyDataStorage = drv.wasAboutToDestroyDataStorage();
      // Membership sets are copied so this instance owns its own copy-on-write sets.
      onlineMembers = new CopyOnWriteHashSet<>(drv.getOnlineMembers());
      offlineMembers = new CopyOnWriteHashSet<>(drv.getOfflineMembers());
      equalMembers =
          new CopyOnWriteHashSet<>(drv.getOfflineAndEqualMembers());
      isRecreated = true;
      // Use the same atomic counters as the previous disk region. This ensures that
      // updates from threads with a reference to the old region update this disk region
      // See 49943
      numOverflowOnDisk = ((AbstractDiskRegion) drv).numOverflowOnDisk;
      numEntriesInVM = ((AbstractDiskRegion) drv).numEntriesInVM;
      numOverflowBytesOnDisk = ((AbstractDiskRegion) drv).numOverflowBytesOnDisk;
      entries = drv.getRecoveredEntryMap();
      readyForRecovery = drv.isReadyForRecovery();
      recoveredEntryCount = drv.getRecoveredEntryCount();
      recoveryCompleted = ((AbstractDiskRegion) drv).recoveryCompleted;
      versionVector = drv.getRegionVersionVector();
      compressorClassName = drv.getCompressorClassName();
      compressor = drv.getCompressor();
      offHeap = drv.getOffHeap();
      // Only a PlaceHolderDiskRegion records whether its RVV was trusted; carry it over.
      if (drv instanceof PlaceHolderDiskRegion) {
        setRVVTrusted(((PlaceHolderDiskRegion) drv).getRVVTrusted());
      }
    } else {
      // This is a brand new disk region.
      this.ds = ds;
      // {
      // DiskRegion existingDr = ds.getByName(name);
      // if (existingDr != null) {
      // throw new IllegalStateException("DiskRegion named " + name + " already exists with id=" +
      // existingDr.getId());
      // }
      // }
      id = ds.generateRegionId();
      flags = EnumSet.noneOf(DiskRegionFlag.class);
      onlineMembers = new CopyOnWriteHashSet<>();
      offlineMembers = new CopyOnWriteHashSet<>();
      equalMembers = new CopyOnWriteHashSet<>();
      isRecreated = false;
      versionVector = new DiskRegionVersionVector(ds.getDiskStoreID());
      numOverflowOnDisk = new AtomicLong();
      numEntriesInVM = new AtomicLong();
      numOverflowBytesOnDisk = new AtomicLong();
    }
  }
  /**
   * Creates a recovered disk region with an already-known region id.
   *
   * <p>State starts empty except for {@code isRecreated} and {@code backup}, which are set to
   * {@code true}; the persisted configuration is expected to be applied later (see
   * {@link #setConfig}).
   *
   * @param ds the disk store this region belongs to
   * @param id the region id recovered from disk
   */
  protected AbstractDiskRegion(DiskStoreImpl ds, long id) {
    this.ds = ds;
    this.id = id;
    flags = EnumSet.noneOf(DiskRegionFlag.class);
    onlineMembers = new CopyOnWriteHashSet<>();
    offlineMembers = new CopyOnWriteHashSet<>();
    equalMembers = new CopyOnWriteHashSet<>();
    isRecreated = true;
    backup = true;
    versionVector = new DiskRegionVersionVector(ds.getDiskStoreID());
    numOverflowOnDisk = new AtomicLong();
    numEntriesInVM = new AtomicLong();
    numOverflowBytesOnDisk = new AtomicLong();
    // We do not initialize the soplog set here. The soplog set needs
    // to be handled the complete set of recovered soplogs, which is not available
    // at the time a recovered disk region is first created.
  }
  /**
   * Used to initialize a PlaceHolderDiskRegion for a region that is being closed
   *
   * <p>Copies the closing region's persisted configuration and membership sets, but resets the
   * stat counters and the recovered entry count, and deliberately drops any in-progress
   * initializing id (fixes 43650) and destroy-in-progress flags.
   *
   * @param drv the region that is being closed
   */
  protected AbstractDiskRegion(DiskRegionView drv) {
    ds = drv.getDiskStore();
    id = drv.getId();
    backup = drv.isBackup();
    clearOplogEntryId = drv.getClearOplogEntryId();
    clearRVV = drv.getClearRVV();
    lruAlgorithm = drv.getLruAlgorithm();
    lruAction = drv.getLruAction();
    lruLimit = drv.getLruLimit();
    concurrencyLevel = drv.getConcurrencyLevel();
    initialCapacity = drv.getInitialCapacity();
    loadFactor = drv.getLoadFactor();
    statisticsEnabled = drv.getStatisticsEnabled();
    isBucket = drv.isBucket();
    flags = drv.getFlags();
    partitionName = drv.getPartitionName();
    startingBucketId = drv.getStartingBucketId();
    myInitializingId = null; // fixes 43650
    myInitializedId = drv.getMyPersistentID();
    aboutToDestroy = false;
    aboutToDestroyDataStorage = false;
    onlineMembers = new CopyOnWriteHashSet<>(drv.getOnlineMembers());
    offlineMembers = new CopyOnWriteHashSet<>(drv.getOfflineMembers());
    equalMembers = new CopyOnWriteHashSet<>(drv.getOfflineAndEqualMembers());
    isRecreated = true;
    // Unlike the by-name constructor, a placeholder gets fresh counters rather than
    // sharing the closing region's atomics.
    numOverflowOnDisk = new AtomicLong();
    numEntriesInVM = new AtomicLong();
    numOverflowBytesOnDisk = new AtomicLong();
    entries = drv.getRecoveredEntryMap();
    readyForRecovery = drv.isReadyForRecovery();
    recoveredEntryCount = 0; // fix for bug 41570
    recoveryCompleted = ((AbstractDiskRegion) drv).recoveryCompleted;
    versionVector = drv.getRegionVersionVector();
    compressorClassName = drv.getCompressorClassName();
    compressor = drv.getCompressor();
    offHeap = drv.getOffHeap();
  }
  /** @return the name of this disk region (the key used to look it up in the disk init file). */
  @Override
  public abstract String getName();
@Override
public DiskStoreImpl getDiskStore() {
return ds;
}
abstract void beginDestroyRegion(LocalRegion region);
public void resetRVV() {
versionVector = new DiskRegionVersionVector(ds.getDiskStoreID());
}
@Override
public long getId() {
return id;
}
@Override
public long getClearOplogEntryId() {
return clearOplogEntryId;
}
@Override
public void setClearOplogEntryId(long v) {
clearOplogEntryId = v;
}
@Override
public RegionVersionVector getClearRVV() {
return clearRVV;
}
@Override
public void setClearRVV(RegionVersionVector rvv) {
clearRVV = rvv;
}
  /**
   * Applies (persisted or runtime) configuration to this disk region view: eviction settings,
   * map sizing, bucket/partition identity, flags, compressor and off-heap mode.
   */
  @Override
  public void setConfig(byte lruAlgorithm, byte lruAction, int lruLimit, int concurrencyLevel,
      int initialCapacity, float loadFactor, boolean statisticsEnabled, boolean isBucket,
      EnumSet<DiskRegionFlag> flags, String partitionName, int startingBucketId,
      String compressorClassName, boolean offHeap) {
    this.lruAlgorithm = lruAlgorithm;
    this.lruAction = lruAction;
    this.lruLimit = lruLimit;
    this.concurrencyLevel = concurrencyLevel;
    this.initialCapacity = initialCapacity;
    this.loadFactor = loadFactor;
    this.statisticsEnabled = statisticsEnabled;
    this.isBucket = isBucket;
    // Identity check matters: 'this.flags' is final, so if a caller passes our own set back,
    // clear-then-addAll on that same instance would empty it. Only copy from a distinct set.
    if (flags != null && flags != this.flags) {
      this.flags.clear();
      this.flags.addAll(flags);
    }
    this.partitionName = partitionName;
    this.startingBucketId = startingBucketId;
    this.compressorClassName = compressorClassName;
    this.offHeap = offHeap;
    // NOTE(review): instantiation is skipped for offline stores — presumably the compressor
    // class may not be on the classpath for offline disk tooling; confirm.
    if (!ds.isOffline()) {
      createCompressorFromClassName();
    }
  }
public void createCompressorFromClassName() {
if (Strings.isNullOrEmpty(compressorClassName)) {
compressor = null;
} else {
try {
@SuppressWarnings("unchecked")
Class<Compressor> compressorClass =
(Class<Compressor>) ClassPathLoader.getLatest().forName(compressorClassName);
compressor = compressorClass.newInstance();
} catch (ClassNotFoundException e) {
throw new IllegalArgumentException(
String.format("Unknown Compressor %s found in disk initialization file.",
compressorClassName),
e);
} catch (InstantiationException e) {
throw new IllegalArgumentException(
String.format("Unknown Compressor %s found in disk initialization file.",
compressorClassName),
e);
} catch (IllegalAccessException e) {
throw new IllegalArgumentException(
String.format("Unknown Compressor %s found in disk initialization file.",
compressorClassName),
e);
}
}
}
@Override
public EvictionAttributes getEvictionAttributes() {
return new EvictionAttributesImpl().setAlgorithm(getActualLruAlgorithm())
.setAction(getActualLruAction()).setMaximum(getLruLimit());
}
@Override
public byte getLruAlgorithm() {
return lruAlgorithm;
}
public EvictionAlgorithm getActualLruAlgorithm() {
return EvictionAlgorithm.parseValue(getLruAlgorithm());
}
@Override
public byte getLruAction() {
return lruAction;
}
public EvictionAction getActualLruAction() {
return EvictionAction.parseValue(getLruAction());
}
@Override
public int getLruLimit() {
return lruLimit;
}
@Override
public int getConcurrencyLevel() {
return concurrencyLevel;
}
@Override
public int getInitialCapacity() {
return initialCapacity;
}
@Override
public float getLoadFactor() {
return loadFactor;
}
@Override
public boolean getStatisticsEnabled() {
return statisticsEnabled;
}
@Override
public boolean isBucket() {
return isBucket;
}
@Override
public EnumSet<DiskRegionFlag> getFlags() {
return flags;
}
@Override
public String getPartitionName() {
return partitionName;
}
@Override
public int getStartingBucketId() {
return startingBucketId;
}
public String getPrName() {
assert isBucket();
String bn = PartitionedRegionHelper.getBucketName(getName());
return PartitionedRegionHelper.getPRPath(bn);
}
private PersistentMemberID myInitializingId = null;
private PersistentMemberID myInitializedId = null;
private final CopyOnWriteHashSet<PersistentMemberID> onlineMembers;
private final CopyOnWriteHashSet<PersistentMemberID> offlineMembers;
private final CopyOnWriteHashSet<PersistentMemberID> equalMembers;
  /**
   * Installs {@code pmid} as this member's in-progress (initializing) persistent id and returns
   * the id it replaced, or null if none was in progress.
   */
  @Override
  public PersistentMemberID addMyInitializingPMID(PersistentMemberID pmid) {
    PersistentMemberID result = myInitializingId;
    myInitializingId = pmid;
    if (result != null) {
      // NOTE(review): a still-initializing id is promoted to the initialized id when
      // superseded — confirm this matches the DiskInitFile recovery protocol.
      myInitializedId = result;
    }
    return result;
  }
  /**
   * Promotes the current initializing id to this member's initialized (persistent) id.
   * An initialization must be in progress when this is called.
   */
  @Override
  public void markInitialized() {
    assert myInitializingId != null;
    myInitializedId = myInitializingId;
    myInitializingId = null;
  }
@Override
public boolean addOnlineMember(PersistentMemberID pmid) {
return onlineMembers.add(pmid);
}
@Override
public boolean addOfflineMember(PersistentMemberID pmid) {
return offlineMembers.add(pmid);
}
@Override
public boolean addOfflineAndEqualMember(PersistentMemberID pmid) {
return equalMembers.add(pmid);
}
@Override
public boolean rmOnlineMember(PersistentMemberID pmid) {
return onlineMembers.remove(pmid);
}
@Override
public boolean rmOfflineMember(PersistentMemberID pmid) {
return offlineMembers.remove(pmid);
}
@Override
public boolean rmEqualMember(PersistentMemberID pmid) {
return equalMembers.remove(pmid);
}
@Override
public void markBeginDestroyRegion() {
aboutToDestroy = true;
}
@Override
public void markBeginDestroyDataStorage() {
aboutToDestroyDataStorage = true;
}
  /**
   * Completes a region destroy: forgets all known members and this member's persistent ids,
   * and clears the destroy-in-progress and recreated flags.
   */
  @Override
  public void markEndDestroyRegion() {
    onlineMembers.clear();
    offlineMembers.clear();
    equalMembers.clear();
    myInitializedId = null;
    myInitializingId = null;
    aboutToDestroy = false;
    isRecreated = false;
  }
@Override
public void markEndDestroyDataStorage() {
myInitializedId = null;
myInitializingId = null;
aboutToDestroyDataStorage = false;
}
  // PersistentMemberView methods
  /** @return this member's in-progress (initializing) persistent id, or null. */
  @Override
  public PersistentMemberID getMyInitializingID() {
    DiskInitFile dif = ds.getDiskInitFile();
    if (dif == null) {
      return myInitializingId;
    }
    // Read under the DiskInitFile monitor when one exists.
    // NOTE(review): mutators are assumed to hold the same lock — confirm in DiskInitFile.
    synchronized (dif) {
      return myInitializingId;
    }
  }
  /** @return this member's initialized persistent id, or null. */
  @Override
  public PersistentMemberID getMyPersistentID() {
    DiskInitFile dif = ds.getDiskInitFile();
    if (dif == null) {
      return myInitializedId;
    }
    // Read under the DiskInitFile monitor when one exists (same pattern as the other views).
    synchronized (dif) {
      return myInitializedId;
    }
  }
  /** @return a snapshot of the members known to be online. */
  @Override
  public Set<PersistentMemberID> getOnlineMembers() {
    DiskInitFile dif = ds.getDiskInitFile();
    if (dif == null) {
      return onlineMembers.getSnapshot();
    }
    // Snapshot taken under the DiskInitFile monitor when one exists.
    synchronized (dif) {
      return onlineMembers.getSnapshot();
    }
  }
  /** @return a snapshot of the members known to be offline. */
  @Override
  public Set<PersistentMemberID> getOfflineMembers() {
    DiskInitFile dif = ds.getDiskInitFile();
    if (dif == null) {
      return offlineMembers.getSnapshot();
    }
    // Snapshot taken under the DiskInitFile monitor when one exists.
    synchronized (dif) {
      return offlineMembers.getSnapshot();
    }
  }
  /** @return a snapshot of the offline members whose data is equal to ours. */
  @Override
  public Set<PersistentMemberID> getOfflineAndEqualMembers() {
    DiskInitFile dif = ds.getDiskInitFile();
    if (dif == null) {
      return equalMembers.getSnapshot();
    }
    // Snapshot taken under the DiskInitFile monitor when one exists.
    synchronized (dif) {
      return equalMembers.getSnapshot();
    }
  }
@Override
public Set<PersistentMemberPattern> getRevokedMembers() {
DiskInitFile dif = ds.getDiskInitFile();
return ds.getRevokedMembers();
}
  // Membership change notifications: each delegates the durable state transition to the
  // disk store, then emits a PERSIST_VERBOSE trace entry identifying this view.

  @Override
  public void memberOffline(PersistentMemberID persistentID) {
    ds.memberOffline(this, persistentID);
    if (logger.isTraceEnabled(LogMarker.PERSIST_VERBOSE)) {
      logger.trace(LogMarker.PERSIST_VERBOSE, "PersistentView {} - {} - member offline {}",
          getDiskStoreID().abbrev(), getName(), persistentID);
    }
  }

  @Override
  public void memberOfflineAndEqual(PersistentMemberID persistentID) {
    ds.memberOfflineAndEqual(this, persistentID);
    if (logger.isTraceEnabled(LogMarker.PERSIST_VERBOSE)) {
      logger.trace(LogMarker.PERSIST_VERBOSE,
          "PersistentView {} - {} - member offline and equal {}", getDiskStoreID().abbrev(),
          getName(), persistentID);
    }
  }

  @Override
  public void memberOnline(PersistentMemberID persistentID) {
    ds.memberOnline(this, persistentID);
    if (logger.isTraceEnabled(LogMarker.PERSIST_VERBOSE)) {
      logger.trace(LogMarker.PERSIST_VERBOSE, "PersistentView {} - {} - member online {}",
          getDiskStoreID().abbrev(), getName(), persistentID);
    }
  }

  @Override
  public void memberRemoved(PersistentMemberID persistentID) {
    ds.memberRemoved(this, persistentID);
    if (logger.isTraceEnabled(LogMarker.PERSIST_VERBOSE)) {
      logger.trace(LogMarker.PERSIST_VERBOSE, "PersistentView {} - {} - member removed {}",
          getDiskStoreID().abbrev(), getName(), persistentID);
    }
  }

  @Override
  public void memberRevoked(PersistentMemberPattern revokedPattern) {
    ds.memberRevoked(revokedPattern);
    if (logger.isTraceEnabled(LogMarker.PERSIST_VERBOSE)) {
      logger.trace(LogMarker.PERSIST_VERBOSE, "PersistentView {} - {} - member revoked {}",
          getDiskStoreID().abbrev(), getName(), revokedPattern);
    }
  }

  // Local identity transitions (initializing -> initialized) are also delegated to the
  // disk store, which owns the durable record of this member's id.

  @Override
  public void setInitializing(PersistentMemberID newId) {
    ds.setInitializing(this, newId);
    if (logger.isTraceEnabled(LogMarker.PERSIST_VERBOSE)) {
      logger.trace(LogMarker.PERSIST_VERBOSE, "PersistentView {} - {} - initializing local id: {}",
          getDiskStoreID().abbrev(), getName(), getMyInitializingID());
    }
  }

  @Override
  public void setInitialized() {
    ds.setInitialized(this);
    if (logger.isTraceEnabled(LogMarker.PERSIST_VERBOSE)) {
      logger.trace(LogMarker.PERSIST_VERBOSE, "PersistentView {} - {} - initialized local id: {}",
          getDiskStoreID().abbrev(), getName(), getMyPersistentID());
    }
  }

  @Override
  public PersistentMemberID generatePersistentID() {
    // Id generation is centralized in the disk store.
    return ds.generatePersistentID();
  }

  @Override
  public boolean isRecreated() {
    return isRecreated;
  }

  @Override
  public boolean hasConfigChanged() {
    return configChanged;
  }

  @Override
  public void setConfigChanged(boolean v) {
    configChanged = v;
  }
  /**
   * Completes a destroy of this disk region: releases any recovery state, then finishes
   * either a data-storage-only destroy or a full region destroy depending on which
   * markBegin* flag was raised.
   */
  @Override
  public void endDestroy(LocalRegion region) {
    // Clean up the state if we were ready to recover this region
    if (isReadyForRecovery()) {
      ds.updateDiskRegion(this);
      entriesMapIncompatible = false;
      if (entries != null) {
        ConcurrentMapWithReusableEntries<Object, Object> other =
            entries.getCustomEntryConcurrentHashMap();
        // Release off-heap memory held by recovered entries that will never be handed
        // to a region.
        for (Map.Entry<Object, Object> me : other.entrySetWithReusableEntries()) {
          RegionEntry oldRe = (RegionEntry) me.getValue();
          if (oldRe instanceof OffHeapRegionEntry) {
            ((OffHeapRegionEntry) oldRe).release();
          } else {
            // no need to keep iterating; they are all either off heap or on heap.
            break;
          }
        }
      }
      entries = null;
      readyForRecovery = false;
    }
    if (aboutToDestroyDataStorage) {
      ds.endDestroyDataStorage(region, (DiskRegion) this);
      if (logger.isTraceEnabled(LogMarker.PERSIST_VERBOSE)) {
        logger.trace(LogMarker.PERSIST_VERBOSE,
            "PersistentView {} - {} - endDestroyDataStorage: {}", getDiskStoreID().abbrev(),
            getName(), getMyPersistentID());
      }
    } else {
      ds.endDestroyRegion(region, (DiskRegion) this);
      if (logger.isTraceEnabled(LogMarker.PERSIST_VERBOSE)) {
        logger.trace(LogMarker.PERSIST_VERBOSE, "PersistentView {} - {} - endDestroy: {}",
            getDiskStoreID().abbrev(), getName(), getMyPersistentID());
      }
    }
  }

  /**
   * Begin the destroy of everything related to this disk region.
   */
  @Override
  public void beginDestroy(LocalRegion region) {
    beginDestroyRegion(region);
    if (logger.isTraceEnabled(LogMarker.PERSIST_VERBOSE)) {
      logger.trace(LogMarker.PERSIST_VERBOSE, "PersistentView {} - {} - beginDestroy: {}",
          getDiskStoreID().abbrev(), getName(), getMyPersistentID());
    }
    // With no initialized local id there is nothing durable to wait for, so the destroy
    // can be completed immediately.
    if (myInitializedId == null) {
      endDestroy(region);
    }
  }

  /**
   * Destroy the data storage this this disk region. Destroying the data storage leaves the
   * persistent view, but removes the data.
   */
  @Override
  public void beginDestroyDataStorage() {
    ds.beginDestroyDataStorage((DiskRegion) this);
    if (logger.isTraceEnabled(LogMarker.PERSIST_VERBOSE)) {
      logger.trace(LogMarker.PERSIST_VERBOSE,
          "PersistentView {} - {} - beginDestroyDataStorage: {}", getDiskStoreID().abbrev(),
          getName(), getMyPersistentID());
    }
  }

  // Intentional no-op at this level; subclasses may allocate storage here.
  public void createDataStorage() {}

  @Override
  public boolean wasAboutToDestroy() {
    return aboutToDestroy;
  }

  @Override
  public boolean wasAboutToDestroyDataStorage() {
    return aboutToDestroyDataStorage;
  }
  /**
   * Set to true once this DiskRegion is ready to be recovered.
   */
  private boolean readyForRecovery;

  /**
   * Total number of entries recovered by restoring from backup. Its initialized right after a
   * recovery but may be updated later as recovered entries go away due to updates and destroys.
   */
  protected int recoveredEntryCount;

  // True when the recovered entry map cannot be reused by the region as-is and must be
  // copied entry-by-entry (see copyExistingRegionMap).
  private boolean entriesMapIncompatible;

  // Region map built during recovery; handed off to (or copied into) the region and then
  // nulled out.
  private RegionMap entries;

  // Latch-like flag: set/notified in initRecoveredEntryCount, awaited in
  // waitForRecoveryCompletion. Also used as the monitor for that handshake.
  private AtomicBoolean recoveryCompleted;

  public void setEntriesMapIncompatible(boolean v) {
    entriesMapIncompatible = v;
  }

  @Override
  public boolean isEntriesMapIncompatible() {
    return entriesMapIncompatible;
  }
public RegionMap useExistingRegionMap(LocalRegion lr) {
RegionMap result = null;
if (!entriesMapIncompatible) {
result = entries;
// if (result != null) {
// result.changeOwner(lr);
// }
}
return result;
}
  /**
   * Blocks until initRecoveredEntryCount signals that recovery has completed. Preserves
   * the caller's interrupt status: interrupts are swallowed while waiting and the flag is
   * re-asserted on exit.
   */
  private void waitForRecoveryCompletion() {
    // Thread.interrupted() clears the flag; remember it so we can restore it below.
    boolean interrupted = Thread.interrupted();
    synchronized (recoveryCompleted) {
      try {
        // @todo also check for shutdown of diskstore?
        while (!recoveryCompleted.get()) {
          try {
            recoveryCompleted.wait();
          } catch (InterruptedException ex) {
            interrupted = true;
          }
        }
      } finally {
        if (interrupted) {
          Thread.currentThread().interrupt();
        }
      }
    }
  }

  /**
   * Transfers the recovered entries into {@code lr}, either by copying them (incompatible
   * map) or by transferring ownership of the whole map. Always waits for recovery to
   * finish first, and always releases the recovered map reference when done.
   */
  @Override
  public void copyExistingRegionMap(LocalRegion lr) {
    waitForRecoveryCompletion();
    if (entriesMapIncompatible) {
      // Reset the numEntriesInVM. It will be incremented when the copy to the new map is done,
      // down in DiskEntry.Help.initialize. However, the other stats can't be updated
      // there because we don't have the value length at that point. So leave
      // those stats alone.
      numEntriesInVM.set(0);
      lr.initializeStats(getNumEntriesInVM(), getNumOverflowOnDisk(),
          getNumOverflowBytesOnDisk());
      lr.copyRecoveredEntries(entries);
    } else {
      entries.changeOwner(lr);
      lr.initializeStats(getNumEntriesInVM(), getNumOverflowOnDisk(),
          getNumOverflowBytesOnDisk());
      // null signals that the map was transferred rather than copied.
      lr.copyRecoveredEntries(null);
    }
    entries = null;
  }
  /**
   * Installs the map of entries produced by recovery and (re)arms the recovery-completed
   * latch awaited by waitForRecoveryCompletion.
   */
  public void setRecoveredEntryMap(RegionMap rm) {
    recoveryCompleted = new AtomicBoolean();
    entries = rm;
  }

  @Override
  public RegionMap getRecoveredEntryMap() {
    return entries;
  }

  // Drops the "recovering" state without transferring the map.
  public void releaseRecoveryData() {
    readyForRecovery = false;
  }

  @Override
  public boolean isReadyForRecovery() {
    // better name for this method would be isRecovering
    return readyForRecovery;
  }

  @Override
  public void prepareForRecovery() {
    readyForRecovery = true;
  }

  /**
   * gets the number of entries recovered
   *
   * @since GemFire 3.2.1
   */
  @Override
  public int getRecoveredEntryCount() {
    return recoveredEntryCount;
  }

  @Override
  public void incRecoveredEntryCount() {
    recoveredEntryCount++;
  }

  /**
   * initializes the number of entries recovered
   */
  @Override
  public void initRecoveredEntryCount() {
    // Releases every thread blocked in waitForRecoveryCompletion. recoveryCompleted is
    // null when no recovered map was ever installed (see setRecoveredEntryMap).
    if (recoveryCompleted != null) {
      synchronized (recoveryCompleted) {
        recoveryCompleted.set(true);
        recoveryCompleted.notifyAll();
      }
    }
  }
  // Count of entries whose value currently lives only on disk (overflowed).
  protected final AtomicLong numOverflowOnDisk;

  @Override
  public long getNumOverflowOnDisk() {
    return numOverflowOnDisk.get();
  }

  @Override
  public void incNumOverflowOnDisk(long delta) {
    numOverflowOnDisk.addAndGet(delta);
  }

  // Total bytes consumed on disk by overflowed values.
  protected final AtomicLong numOverflowBytesOnDisk;

  @Override
  public long getNumOverflowBytesOnDisk() {
    return numOverflowBytesOnDisk.get();
  }

  @Override
  public void incNumOverflowBytesOnDisk(long delta) {
    numOverflowBytesOnDisk.addAndGet(delta);
  }

  // Count of entries whose value is resident in the VM.
  protected final AtomicLong numEntriesInVM;

  @Override
  public long getNumEntriesInVM() {
    return numEntriesInVM.get();
  }

  @Override
  public void incNumEntriesInVM(long delta) {
    numEntriesInVM.addAndGet(delta);
  }

  /**
   * Returns true if this region maintains a backup of all its keys and values on disk. Returns
   * false if only values that will not fit in memory are written to disk.
   */
  @Override
  public boolean isBackup() {
    return backup;
  }

  protected void setBackup(boolean v) {
    backup = v;
  }
  /**
   * Prints a one-line summary of this disk region's attributes to the given stream,
   * with extra identity/membership detail when PERSIST_RECOVERY_VERBOSE tracing is on.
   */
  public void dump(PrintStream printStream) {
    String name = getName();
    // Collapse bucket names to the partitioned-region name unless verbose tracing asks
    // for per-bucket detail.
    if (isBucket() && !logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
      name = getPrName();
    }
    String msg = name + ":" + " -lru=" + getEvictionAttributes().getAlgorithm();
    if (!getEvictionAttributes().getAlgorithm().isNone()) {
      msg += " -lruAction=" + getEvictionAttributes().getAction();
      // The heap-LRU algorithm has no fixed limit, so only print one for the others.
      if (!getEvictionAttributes().getAlgorithm().isLRUHeap()) {
        msg += " -lruLimit=" + getEvictionAttributes().getMaximum();
      }
    }
    msg += " -concurrencyLevel=" + getConcurrencyLevel() + " -initialCapacity="
        + getInitialCapacity() + " -loadFactor=" + getLoadFactor() + " -offHeap=" + getOffHeap()
        + " -compressor=" + (getCompressorClassName() == null ? "none" : getCompressorClassName())
        + " -statisticsEnabled=" + getStatisticsEnabled();
    if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
      msg += " drId=" + getId() + " isBucket=" + isBucket() + " clearEntryId="
          + getClearOplogEntryId() + " MyInitializingID=<" + getMyInitializingID() + ">"
          + " MyPersistentID=<" + getMyPersistentID() + ">" + " onlineMembers=" + getOnlineMembers()
          + " offlineMembers=" + getOfflineMembers() + " equalsMembers="
          + getOfflineAndEqualMembers();
    }
    printStream.println(msg);
  }
public String dump2() {
final String lineSeparator = lineSeparator();
StringBuilder sb = new StringBuilder();
String name = getName();
if (isBucket() && logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
name = getPrName();
}
String msg = name + ":" + " -lru=" + getEvictionAttributes().getAlgorithm();
sb.append(name);
sb.append(lineSeparator);
sb.append("lru=" + getEvictionAttributes().getAlgorithm());
sb.append(lineSeparator);
if (!getEvictionAttributes().getAlgorithm().isNone()) {
sb.append("lruAction=" + getEvictionAttributes().getAction());
sb.append(lineSeparator);
if (!getEvictionAttributes().getAlgorithm().isLRUHeap()) {
sb.append("lruAction=" + getEvictionAttributes().getAction());
sb.append(lineSeparator);
}
}
sb.append("-concurrencyLevel=" + getConcurrencyLevel());
sb.append(lineSeparator);
sb.append("-initialCapacity=" + getInitialCapacity());
sb.append(lineSeparator);
sb.append("-loadFactor=" + getLoadFactor());
sb.append(lineSeparator);
sb.append("-offHeap=" + getOffHeap());
sb.append(lineSeparator);
sb.append(
"-compressor=" + (getCompressorClassName() == null ? "none" : getCompressorClassName()));
sb.append(lineSeparator);
sb.append("-statisticsEnabled=" + getStatisticsEnabled());
sb.append(lineSeparator);
if (logger.isTraceEnabled(LogMarker.PERSIST_RECOVERY_VERBOSE)) {
sb.append("drId=" + getId());
sb.append(lineSeparator);
sb.append("isBucket=" + isBucket());
sb.append(lineSeparator);
sb.append("clearEntryId=" + getClearOplogEntryId());
sb.append(lineSeparator);
sb.append("MyInitializingID=<" + getMyInitializingID() + ">");
sb.append(lineSeparator);
sb.append("MyPersistentID=<" + getMyPersistentID() + ">");
sb.append(lineSeparator);
sb.append("onlineMembers=" + getOnlineMembers());
sb.append(lineSeparator);
sb.append("offlineMembers=" + getOfflineMembers());
sb.append(lineSeparator);
sb.append("equalsMembers=" + getOfflineAndEqualMembers());
sb.append(lineSeparator);
sb.append("flags=").append(getFlags());
sb.append(lineSeparator);
}
return sb.toString();
}
  /**
   * Writes this region's common attributes and persistent view to standard output.
   */
  public void dumpMetadata() {
    String name = getName();
    StringBuilder msg = new StringBuilder(name);
    dumpCommonAttributes(msg);
    dumpPersistentView(msg);
    // Intentionally System.out (an operator-facing dump), not the logger.
    System.out.println(msg);
  }

  /**
   * Dump the (bucket specific) persistent view to the string builder
   */
  public void dumpPersistentView(StringBuilder msg) {
    msg.append("\n\tMyInitializingID=<").append(getMyInitializingID()).append(">");
    msg.append("\n\tMyPersistentID=<").append(getMyPersistentID()).append(">");
    msg.append("\n\tonlineMembers:");
    for (PersistentMemberID id : getOnlineMembers()) {
      msg.append("\n\t\t").append(id);
    }
    msg.append("\n\tofflineMembers:");
    for (PersistentMemberID id : getOfflineMembers()) {
      msg.append("\n\t\t").append(id);
    }
    msg.append("\n\tequalsMembers:");
    for (PersistentMemberID id : getOfflineAndEqualMembers()) {
      msg.append("\n\t\t").append(id);
    }
  }

  /**
   * Dump the attributes which are common across the PR to the string builder.
   */
  public void dumpCommonAttributes(StringBuilder msg) {
    msg.append("\n\tlru=").append(getEvictionAttributes().getAlgorithm());
    if (!getEvictionAttributes().getAlgorithm().isNone()) {
      msg.append("\n\tlruAction=").append(getEvictionAttributes().getAction());
      // Heap LRU has no fixed maximum to report.
      if (!getEvictionAttributes().getAlgorithm().isLRUHeap()) {
        msg.append("\n\tlruLimit=").append(getEvictionAttributes().getMaximum());
      }
    }
    msg.append("\n\tconcurrencyLevel=").append(getConcurrencyLevel());
    msg.append("\n\tinitialCapacity=").append(getInitialCapacity());
    msg.append("\n\tloadFactor=").append(getLoadFactor());
    msg.append("\n\toffHeap=").append(getOffHeap());
    msg.append("\n\tstatisticsEnabled=").append(getStatisticsEnabled());
    msg.append("\n\tdrId=").append(getId());
    msg.append("\n\tisBucket=").append(isBucket());
    msg.append("\n\tclearEntryId=").append(getClearOplogEntryId());
    msg.append("\n\tflags=").append(getFlags());
  }
  /**
   * This method was added to fix bug 40192. It is like getBytesAndBits except it will return
   * Token.REMOVE_PHASE1 if the htreeReference has changed (which means a clear was done).
   *
   * @return an instance of BytesAndBits or Token.REMOVED_PHASE1
   */
  @Override
  public Object getRaw(DiskId id) {
    // Read lock guards against concurrent structural changes while the disk store reads.
    acquireReadLock();
    try {
      return getDiskStore().getRaw(this, id);
    } finally {
      releaseReadLock();
    }
  }

  @Override
  public RegionVersionVector getRegionVersionVector() {
    return versionVector;
  }

  public long getVersionForMember(VersionSource member) {
    return versionVector.getVersionForMember(member);
  }

  // The recordRecovered* methods feed version information read from the oplogs back into
  // the in-memory region version vector during recovery.

  public void recordRecoveredGCVersion(VersionSource member, long gcVersion) {
    versionVector.recordGCVersion(member, gcVersion);
  }

  public void recordRecoveredVersionHolder(VersionSource member, RegionVersionHolder versionHolder,
      boolean latestOplog) {
    versionVector.initRecoveredVersion(member, versionHolder, latestOplog);
  }

  public void recordRecoveredVersionTag(VersionTag tag) {
    versionVector.recordVersion(tag.getMemberID(), tag.getRegionVersion());
  }

  /**
   * Indicate that the current RVV for this disk region does not accurately reflect what has been
   * recorded on disk. This is true while we are in the middle of a GII, because we record the new
   * RVV at the beginning of the GII. If we recover in this state, we need to know that the
   * recovered RVV is not something we can use to do a delta GII.
   */
  public void setRVVTrusted(boolean trusted) {
    rvvTrusted = trusted;
  }

  public boolean getRVVTrusted() {
    return rvvTrusted;
  }

  public PersistentOplogSet getOplogSet() {
    return getDiskStore().getPersistentOplogSet(this);
  }
  // Simple accessors delegating to fields or to the owning disk store.

  @Override
  public String getCompressorClassName() {
    return compressorClassName;
  }

  @Override
  public Compressor getCompressor() {
    return compressor;
  }

  @Override
  public boolean getOffHeap() {
    return offHeap;
  }

  @Override
  public CachePerfStats getCachePerfStats() {
    return ds.getCache().getCachePerfStats();
  }

  @Override
  public void oplogRecovered(long oplogId) {
    // do nothing. Overridden in ExportDiskRegion
  }

  @Override
  public String toString() {
    return getClass().getSimpleName() + ":" + getName();
  }

  @Override
  public void incRecentlyUsed() {
    entries.incRecentlyUsed();
  }

  @Override
  public StatisticsFactory getStatisticsFactory() {
    return ds.getStatisticsFactory();
  }

  @Override
  public String getNameForStats() {
    // Buckets report stats under the partitioned region's name.
    if (isBucket()) {
      return getPrName();
    } else {
      return getName();
    }
  }

  @Override
  public InternalCache getCache() {
    return getDiskStore().getCache();
  }
}
|
googleapis/google-cloud-java | 36,157 | java-video-intelligence/proto-google-cloud-video-intelligence-v1p1beta1/src/main/java/com/google/cloud/videointelligence/v1p1beta1/AnnotateVideoResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1p1beta1/video_intelligence.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.videointelligence.v1p1beta1;
/**
*
*
* <pre>
* Video annotation response. Included in the `response`
* field of the `Operation` returned by the `GetOperation`
* call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse}
*/
public final class AnnotateVideoResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse)
AnnotateVideoResponseOrBuilder {
private static final long serialVersionUID = 0L;
  // Use AnnotateVideoResponse.newBuilder() to construct.
  private AnnotateVideoResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: the repeated field starts out as the shared empty list.
  private AnnotateVideoResponse() {
    annotationResults_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new AnnotateVideoResponse();
  }

  // Generated descriptor/reflection plumbing tying this class to its proto message type.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1p1beta1_AnnotateVideoResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1p1beta1_AnnotateVideoResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse.class,
            com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse.Builder.class);
  }
  public static final int ANNOTATION_RESULTS_FIELD_NUMBER = 1;

  // Immutable after construction; set either by the default ctor (empty list) or by the
  // Builder, which wraps its working list in Collections.unmodifiableList.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults>
      annotationResults_;

  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults>
      getAnnotationResultsList() {
    return annotationResults_;
  }

  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<
          ? extends com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResultsOrBuilder>
      getAnnotationResultsOrBuilderList() {
    return annotationResults_;
  }

  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public int getAnnotationResultsCount() {
    return annotationResults_.size();
  }

  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults getAnnotationResults(
      int index) {
    return annotationResults_.get(index);
  }

  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResultsOrBuilder
      getAnnotationResultsOrBuilder(int index) {
    return annotationResults_.get(index);
  }
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true. This message
  // has no required fields, so it is always initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Wire serialization: field 1 messages followed by any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < annotationResults_.size(); i++) {
      output.writeMessage(1, annotationResults_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  // Serialized size is computed once and memoized in memoizedSize (-1 = not computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < annotationResults_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(1, annotationResults_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality over the repeated field and unknown fields, per protobuf
  // generated-message conventions.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse other =
        (com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse) obj;

    if (!getAnnotationResultsList().equals(other.getAnnotationResultsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash code memoized in memoizedHashCode (0 = not yet computed); consistent with equals.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getAnnotationResultsCount() > 0) {
      hash = (37 * hash) + ANNOTATION_RESULTS_FIELD_NUMBER;
      hash = (53 * hash) + getAnnotationResultsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads: one per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
  // without an ExtensionRegistryLite. All delegate to the shared PARSER.
  public static com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: all routes to a Builder go through the default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty Builder; any other instance is merged in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Video annotation response. Included in the `response`
* field of the `Operation` returned by the `GetOperation`
* call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse)
com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponseOrBuilder {
    // Generated descriptor/reflection plumbing for the Builder, mirroring the message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p1beta1_AnnotateVideoResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p1beta1_AnnotateVideoResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse.class,
              com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse.Builder.class);
    }

    // Construct using
    // com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets the builder to the empty message, handling both the plain-list and
    // field-builder representations of the repeated field.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (annotationResultsBuilder_ == null) {
        annotationResults_ = java.util.Collections.emptyList();
      } else {
        annotationResults_ = null;
        annotationResultsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p1beta1_AnnotateVideoResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse
        getDefaultInstanceForType() {
      return com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse
          .getDefaultInstance();
    }

    // build() enforces the initialized contract; buildPartial() does not.
    @java.lang.Override
    public com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse build() {
      com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse buildPartial() {
      com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse result =
          new com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Freezes the repeated field into the result: the plain list is wrapped unmodifiable
    // (and the "mutable" bit cleared), or the field builder produces the final list.
    private void buildPartialRepeatedFields(
        com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse result) {
      if (annotationResultsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          annotationResults_ = java.util.Collections.unmodifiableList(annotationResults_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.annotationResults_ = annotationResults_;
      } else {
        result.annotationResults_ = annotationResultsBuilder_.build();
      }
    }

    // Generated no-op for this message (no singular fields); the unused local is a
    // protoc codegen artifact — do not hand-edit generated files to remove it.
    private void buildPartial0(
        com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse result) {
      int from_bitField0_ = bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse) {
return mergeFrom(
(com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges {@code other} into this builder using protobuf repeated-field merge
    // semantics: other's annotation_results are appended to ours. A default-instance
    // argument is a no-op.
    public Builder mergeFrom(
        com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse other) {
      if (other
          == com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse
              .getDefaultInstance()) return this;
      if (annotationResultsBuilder_ == null) {
        if (!other.annotationResults_.isEmpty()) {
          if (annotationResults_.isEmpty()) {
            // Our list is empty: share other's list by reference and clear the
            // "locally mutable" bit so any later write makes a defensive copy.
            annotationResults_ = other.annotationResults_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureAnnotationResultsIsMutable();
            annotationResults_.addAll(other.annotationResults_);
          }
          onChanged();
        }
      } else {
        if (!other.annotationResults_.isEmpty()) {
          if (annotationResultsBuilder_.isEmpty()) {
            // The field builder exists but holds nothing: cheaper to discard it,
            // adopt other's list directly, and recreate the builder only when the
            // runtime mandates always using field builders.
            annotationResultsBuilder_.dispose();
            annotationResultsBuilder_ = null;
            annotationResults_ = other.annotationResults_;
            bitField0_ = (bitField0_ & ~0x00000001);
            annotationResultsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getAnnotationResultsFieldBuilder()
                    : null;
          } else {
            annotationResultsBuilder_.addAllMessages(other.annotationResults_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Parses serialized message data from the input stream into this builder.
    // Unknown fields are preserved via parseUnknownField; onChanged() always fires
    // in the finally block so partially applied data still notifies the parent.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of the stream / enclosing message.
              done = true;
              break;
            // Tag 10 = field number 1 (annotation_results), length-delimited.
            case 10:
              {
                com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults m =
                    input.readMessage(
                        com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults
                            .parser(),
                        extensionRegistry);
                if (annotationResultsBuilder_ == null) {
                  ensureAnnotationResultsIsMutable();
                  annotationResults_.add(m);
                } else {
                  annotationResultsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.util.List<com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults>
annotationResults_ = java.util.Collections.emptyList();
    // Replaces annotationResults_ with a private mutable copy unless bit 0 already
    // marks it as locally owned; guards against mutating a shared or frozen list
    // (e.g. one adopted from another message in mergeFrom).
    private void ensureAnnotationResultsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        annotationResults_ =
            new java.util.ArrayList<
                com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults>(
                annotationResults_);
        bitField0_ |= 0x00000001;
      }
    }
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults,
com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults.Builder,
com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResultsOrBuilder>
annotationResultsBuilder_;
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public java.util.List<com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults>
getAnnotationResultsList() {
if (annotationResultsBuilder_ == null) {
return java.util.Collections.unmodifiableList(annotationResults_);
} else {
return annotationResultsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public int getAnnotationResultsCount() {
if (annotationResultsBuilder_ == null) {
return annotationResults_.size();
} else {
return annotationResultsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults getAnnotationResults(
int index) {
if (annotationResultsBuilder_ == null) {
return annotationResults_.get(index);
} else {
return annotationResultsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder setAnnotationResults(
int index, com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults value) {
if (annotationResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAnnotationResultsIsMutable();
annotationResults_.set(index, value);
onChanged();
} else {
annotationResultsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder setAnnotationResults(
int index,
com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults.Builder
builderForValue) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
annotationResults_.set(index, builderForValue.build());
onChanged();
} else {
annotationResultsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAnnotationResults(
com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults value) {
if (annotationResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAnnotationResultsIsMutable();
annotationResults_.add(value);
onChanged();
} else {
annotationResultsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAnnotationResults(
int index, com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults value) {
if (annotationResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAnnotationResultsIsMutable();
annotationResults_.add(index, value);
onChanged();
} else {
annotationResultsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAnnotationResults(
com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults.Builder
builderForValue) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
annotationResults_.add(builderForValue.build());
onChanged();
} else {
annotationResultsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAnnotationResults(
int index,
com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults.Builder
builderForValue) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
annotationResults_.add(index, builderForValue.build());
onChanged();
} else {
annotationResultsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
    // Appends every element of {@code values} to annotation_results, routing
    // through the field builder when one exists.
    public Builder addAllAnnotationResults(
        java.lang.Iterable<
                ? extends com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults>
            values) {
      if (annotationResultsBuilder_ == null) {
        ensureAnnotationResultsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, annotationResults_);
        onChanged();
      } else {
        annotationResultsBuilder_.addAllMessages(values);
      }
      return this;
    }
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
    // Removes all annotation_results entries. Resets the backing list to the shared
    // empty list and clears the local-mutability bit, or clears the field builder.
    public Builder clearAnnotationResults() {
      if (annotationResultsBuilder_ == null) {
        annotationResults_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        annotationResultsBuilder_.clear();
      }
      return this;
    }
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder removeAnnotationResults(int index) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
annotationResults_.remove(index);
onChanged();
} else {
annotationResultsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults.Builder
getAnnotationResultsBuilder(int index) {
return getAnnotationResultsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResultsOrBuilder
getAnnotationResultsOrBuilder(int index) {
if (annotationResultsBuilder_ == null) {
return annotationResults_.get(index);
} else {
return annotationResultsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public java.util.List<
? extends com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResultsOrBuilder>
getAnnotationResultsOrBuilderList() {
if (annotationResultsBuilder_ != null) {
return annotationResultsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(annotationResults_);
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults.Builder
addAnnotationResultsBuilder() {
return getAnnotationResultsFieldBuilder()
.addBuilder(
com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults
.getDefaultInstance());
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults.Builder
addAnnotationResultsBuilder(int index) {
return getAnnotationResultsFieldBuilder()
.addBuilder(
index,
com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults
.getDefaultInstance());
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public java.util.List<
com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults.Builder>
getAnnotationResultsBuilderList() {
return getAnnotationResultsFieldBuilder().getBuilderList();
}
    // Lazily creates the RepeatedFieldBuilderV3 for annotation_results. Once the
    // field builder exists it owns the elements, so the plain list reference is
    // dropped (set to null); all accessors then delegate to the builder.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults,
            com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults.Builder,
            com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResultsOrBuilder>
        getAnnotationResultsFieldBuilder() {
      if (annotationResultsBuilder_ == null) {
        annotationResultsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults,
                com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults.Builder,
                com.google.cloud.videointelligence.v1p1beta1.VideoAnnotationResultsOrBuilder>(
                annotationResults_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        annotationResults_ = null;
      }
      return annotationResultsBuilder_;
    }
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse)
private static final com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse();
}
public static com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<AnnotateVideoResponse> PARSER =
new com.google.protobuf.AbstractParser<AnnotateVideoResponse>() {
@java.lang.Override
public AnnotateVideoResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<AnnotateVideoResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<AnnotateVideoResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,157 | java-video-intelligence/proto-google-cloud-video-intelligence-v1p2beta1/src/main/java/com/google/cloud/videointelligence/v1p2beta1/AnnotateVideoResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1p2beta1/video_intelligence.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.videointelligence.v1p2beta1;
/**
*
*
* <pre>
* Video annotation response. Included in the `response`
* field of the `Operation` returned by the `GetOperation`
* call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse}
*/
public final class AnnotateVideoResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse)
AnnotateVideoResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use AnnotateVideoResponse.newBuilder() to construct.
private AnnotateVideoResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AnnotateVideoResponse() {
annotationResults_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new AnnotateVideoResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse.class,
com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse.Builder.class);
}
public static final int ANNOTATION_RESULTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults>
annotationResults_;
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults>
getAnnotationResultsList() {
return annotationResults_;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
@java.lang.Override
public java.util.List<
? extends com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResultsOrBuilder>
getAnnotationResultsOrBuilderList() {
return annotationResults_;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
@java.lang.Override
public int getAnnotationResultsCount() {
return annotationResults_.size();
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
@java.lang.Override
public com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults getAnnotationResults(
int index) {
return annotationResults_.get(index);
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
@java.lang.Override
public com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResultsOrBuilder
getAnnotationResultsOrBuilder(int index) {
return annotationResults_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  // Serializes this message: each annotation_results element as field 1, followed
  // by any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < annotationResults_.size(); i++) {
      output.writeMessage(1, annotationResults_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the serialized byte size. memoizedSize == -1 means
  // "not yet computed"; the message is immutable so caching is safe.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < annotationResults_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(1, annotationResults_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: two responses are equal iff their annotation_results lists and
  // unknown-field sets are equal. Non-message types fall back to super.equals.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse other =
        (com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse) obj;
    if (!getAnnotationResultsList().equals(other.getAnnotationResultsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash code consistent with equals(), mixing the descriptor, the repeated field
  // (only when non-empty), and unknown fields. Memoized; 0 is the "not yet
  // computed" sentinel, matching the protobuf generated-code convention.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getAnnotationResultsCount() > 0) {
      hash = (37 * hash) + ANNOTATION_RESULTS_FIELD_NUMBER;
      hash = (53 * hash) + getAnnotationResultsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Video annotation response. Included in the `response`
* field of the `Operation` returned by the `GetOperation`
* call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse)
com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse.class,
com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse.Builder.class);
}
// Construct using
// com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    // Resets the builder to the default (empty) state: clears the bit field and
    // either reinstalls the shared empty list or clears the field builder.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (annotationResultsBuilder_ == null) {
        annotationResults_ = java.util.Collections.emptyList();
      } else {
        // Field builder owns the elements; drop the stale list reference too.
        annotationResults_ = null;
        annotationResultsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse
getDefaultInstanceForType() {
return com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse build() {
com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    // Builds the message without enforcing required-field initialization (none
    // exist here). Repeated fields transfer via buildPartialRepeatedFields;
    // buildPartial0 runs only if any singular-field presence bit is set.
    @java.lang.Override
    public com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse buildPartial() {
      com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse result =
          new com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
private void buildPartialRepeatedFields(
com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse result) {
if (annotationResultsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
annotationResults_ = java.util.Collections.unmodifiableList(annotationResults_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.annotationResults_ = annotationResults_;
} else {
result.annotationResults_ = annotationResultsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse) {
return mergeFrom(
(com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse other) {
if (other
== com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse
.getDefaultInstance()) return this;
if (annotationResultsBuilder_ == null) {
if (!other.annotationResults_.isEmpty()) {
if (annotationResults_.isEmpty()) {
annotationResults_ = other.annotationResults_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAnnotationResultsIsMutable();
annotationResults_.addAll(other.annotationResults_);
}
onChanged();
}
} else {
if (!other.annotationResults_.isEmpty()) {
if (annotationResultsBuilder_.isEmpty()) {
annotationResultsBuilder_.dispose();
annotationResultsBuilder_ = null;
annotationResults_ = other.annotationResults_;
bitField0_ = (bitField0_ & ~0x00000001);
annotationResultsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getAnnotationResultsFieldBuilder()
: null;
} else {
annotationResultsBuilder_.addAllMessages(other.annotationResults_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of input.
              done = true;
              break;
            case 10:
              {
                // Tag 10 = field 1 (annotation_results), wire type 2 (length-delimited).
                com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults m =
                    input.readMessage(
                        com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults
                            .parser(),
                        extensionRegistry);
                if (annotationResultsBuilder_ == null) {
                  ensureAnnotationResultsIsMutable();
                  annotationResults_.add(m);
                } else {
                  annotationResultsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                // Unrecognized tags are preserved as unknown fields; false means endgroup.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Mark the builder dirty even if parsing stopped partway through.
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001 set means annotationResults_ is a private mutable copy.
    private int bitField0_;
    private java.util.List<com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults>
        annotationResults_ = java.util.Collections.emptyList();
    // Copy-on-write: replace the possibly-shared/immutable list with a private
    // ArrayList before the first mutation.
    private void ensureAnnotationResultsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        annotationResults_ =
            new java.util.ArrayList<
                com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults>(
                annotationResults_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily created; once non-null it (not annotationResults_) is the source of truth.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults,
            com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults.Builder,
            com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResultsOrBuilder>
        annotationResultsBuilder_;
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public java.util.List<com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults>
        getAnnotationResultsList() {
      if (annotationResultsBuilder_ == null) {
        // Plain-list mode: expose a read-only view of the backing list.
        return java.util.Collections.unmodifiableList(annotationResults_);
      } else {
        return annotationResultsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public int getAnnotationResultsCount() {
      // Whichever of list / nested builder is active is authoritative.
      if (annotationResultsBuilder_ == null) {
        return annotationResults_.size();
      } else {
        return annotationResultsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults getAnnotationResults(
        int index) {
      // Index bounds are checked by the underlying list/builder.
      if (annotationResultsBuilder_ == null) {
        return annotationResults_.get(index);
      } else {
        return annotationResultsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public Builder setAnnotationResults(
        int index, com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults value) {
      if (annotationResultsBuilder_ == null) {
        // Protobuf fields are null-hostile: reject null rather than store it.
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAnnotationResultsIsMutable();
        annotationResults_.set(index, value);
        onChanged();
      } else {
        annotationResultsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public Builder setAnnotationResults(
        int index,
        com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults.Builder
            builderForValue) {
      if (annotationResultsBuilder_ == null) {
        ensureAnnotationResultsIsMutable();
        // Snapshot the sub-builder; later edits to builderForValue are not reflected.
        annotationResults_.set(index, builderForValue.build());
        onChanged();
      } else {
        annotationResultsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public Builder addAnnotationResults(
        com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults value) {
      if (annotationResultsBuilder_ == null) {
        // Protobuf fields are null-hostile: reject null rather than store it.
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAnnotationResultsIsMutable();
        annotationResults_.add(value);
        onChanged();
      } else {
        annotationResultsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public Builder addAnnotationResults(
        int index, com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults value) {
      if (annotationResultsBuilder_ == null) {
        // Protobuf fields are null-hostile: reject null rather than store it.
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAnnotationResultsIsMutable();
        annotationResults_.add(index, value);
        onChanged();
      } else {
        annotationResultsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public Builder addAnnotationResults(
        com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults.Builder
            builderForValue) {
      if (annotationResultsBuilder_ == null) {
        ensureAnnotationResultsIsMutable();
        // Snapshot the sub-builder; later edits to builderForValue are not reflected.
        annotationResults_.add(builderForValue.build());
        onChanged();
      } else {
        annotationResultsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public Builder addAnnotationResults(
        int index,
        com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults.Builder
            builderForValue) {
      if (annotationResultsBuilder_ == null) {
        ensureAnnotationResultsIsMutable();
        // Snapshot the sub-builder; later edits to builderForValue are not reflected.
        annotationResults_.add(index, builderForValue.build());
        onChanged();
      } else {
        annotationResultsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public Builder addAllAnnotationResults(
        java.lang.Iterable<
                ? extends com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults>
            values) {
      if (annotationResultsBuilder_ == null) {
        ensureAnnotationResultsIsMutable();
        // Bulk add via the runtime helper, which also null-checks each element.
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, annotationResults_);
        onChanged();
      } else {
        annotationResultsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public Builder clearAnnotationResults() {
      if (annotationResultsBuilder_ == null) {
        // Reset to the shared empty list and drop the "mutable copy" bit.
        annotationResults_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        annotationResultsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public Builder removeAnnotationResults(int index) {
      // Index bounds are checked by the underlying list/builder.
      if (annotationResultsBuilder_ == null) {
        ensureAnnotationResultsIsMutable();
        annotationResults_.remove(index);
        onChanged();
      } else {
        annotationResultsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults.Builder
        getAnnotationResultsBuilder(int index) {
      // Forces nested-builder mode; the returned builder edits element `index` in place.
      return getAnnotationResultsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResultsOrBuilder
        getAnnotationResultsOrBuilder(int index) {
      // Read-only access without forcing nested-builder mode.
      if (annotationResultsBuilder_ == null) {
        return annotationResults_.get(index);
      } else {
        return annotationResultsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public java.util.List<
            ? extends com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResultsOrBuilder>
        getAnnotationResultsOrBuilderList() {
      // Read-only list view; a live view in builder mode, unmodifiable otherwise.
      if (annotationResultsBuilder_ != null) {
        return annotationResultsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(annotationResults_);
      }
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults.Builder
        addAnnotationResultsBuilder() {
      // Appends a default-valued element and returns a builder for editing it in place.
      return getAnnotationResultsFieldBuilder()
          .addBuilder(
              com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults
                  .getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults.Builder
        addAnnotationResultsBuilder(int index) {
      // Inserts a default-valued element at `index`, returning its in-place builder.
      return getAnnotationResultsFieldBuilder()
          .addBuilder(
              index,
              com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults
                  .getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Annotation results for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults annotation_results = 1;
     * </code>
     */
    public java.util.List<
            com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults.Builder>
        getAnnotationResultsBuilderList() {
      // Forces nested-builder mode; returned builders edit elements in place.
      return getAnnotationResultsFieldBuilder().getBuilderList();
    }
    // Lazily switches the field from plain-list mode to nested-builder mode; after the
    // switch annotationResults_ is nulled and the RepeatedFieldBuilderV3 owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults,
            com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults.Builder,
            com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResultsOrBuilder>
        getAnnotationResultsFieldBuilder() {
      if (annotationResultsBuilder_ == null) {
        annotationResultsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults,
                com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults.Builder,
                com.google.cloud.videointelligence.v1p2beta1.VideoAnnotationResultsOrBuilder>(
                annotationResults_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        annotationResults_ = null;
      }
      return annotationResultsBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Delegation retained only to narrow the return type to this Builder.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse)
  // Singleton default (all-fields-empty) instance, created eagerly at class load.
  private static final com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse();
  }
  public static com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared stateless parser; all static parseFrom overloads funnel through it.
  private static final com.google.protobuf.Parser<AnnotateVideoResponse> PARSER =
      new com.google.protobuf.AbstractParser<AnnotateVideoResponse>() {
        @java.lang.Override
        public AnnotateVideoResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially parsed message so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<AnnotateVideoResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<AnnotateVideoResponse> getParserForType() {
    // Instance hook used by the generic runtime; same shared parser.
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,157 | java-video-intelligence/proto-google-cloud-video-intelligence-v1p3beta1/src/main/java/com/google/cloud/videointelligence/v1p3beta1/AnnotateVideoResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1p3beta1/video_intelligence.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.videointelligence.v1p3beta1;
/**
*
*
* <pre>
* Video annotation response. Included in the `response`
* field of the `Operation` returned by the `GetOperation`
* call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse}
*/
public final class AnnotateVideoResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse)
AnnotateVideoResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use AnnotateVideoResponse.newBuilder() to construct.
  private AnnotateVideoResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private AnnotateVideoResponse() {
    // Default construction: repeated field starts as the shared empty list.
    annotationResults_ = java.util.Collections.emptyList();
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Invoked reflectively by the protobuf runtime to create instances.
    return new AnnotateVideoResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    // Descriptor generated from video_intelligence.proto.
    return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1p3beta1_AnnotateVideoResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds reflective field access to this message class and its Builder.
    return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1p3beta1_AnnotateVideoResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse.class,
            com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse.Builder.class);
  }
  // Immutable message side: annotationResults_ is fixed at build time, so all
  // accessors below are straight reads with no defensive copying.
  public static final int ANNOTATION_RESULTS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults>
      annotationResults_;
  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults>
      getAnnotationResultsList() {
    return annotationResults_;
  }
  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<
          ? extends com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResultsOrBuilder>
      getAnnotationResultsOrBuilderList() {
    // Messages implement their OrBuilder interface, so the list doubles as both views.
    return annotationResults_;
  }
  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public int getAnnotationResultsCount() {
    return annotationResults_.size();
  }
  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults getAnnotationResults(
      int index) {
    return annotationResults_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResultsOrBuilder
      getAnnotationResultsOrBuilder(int index) {
    return annotationResults_.get(index);
  }
  private byte memoizedIsInitialized = -1; // -1 unknown, 0 not initialized, 1 initialized
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields declared: always initialized; memoize the result.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize each repeated element as field 1, then any preserved unknown fields.
    for (int i = 0; i < annotationResults_.size(); i++) {
      output.writeMessage(1, annotationResults_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the wire size; -1 means not yet computed. Safe because
    // the message is immutable after build.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < annotationResults_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(1, annotationResults_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse other =
        (com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse) obj;
    // Field-by-field comparison; unknown fields participate in equality.
    if (!getAnnotationResultsList().equals(other.getAnnotationResultsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode == 0 means not yet computed; safe because the message
    // is immutable after build.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Empty repeated fields are skipped so hashCode stays consistent with equals.
    if (getAnnotationResultsCount() > 0) {
      hash = (37 * hash) + ANNOTATION_RESULTS_FIELD_NUMBER;
      hash = (53 * hash) + getAnnotationResultsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points for every supported input form; all delegate to the
  // shared PARSER (optionally via the runtime's IOException-translating helpers).
  public static com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix (writeDelimitedTo peer).
  public static com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse prototype) {
    // Builder pre-populated with prototype's field values.
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the merge for the default instance: a fresh Builder is equivalent.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Video annotation response. Included in the `response`
* field of the `Operation` returned by the `GetOperation`
* call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse)
com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      // Same descriptor as the message class.
      return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p3beta1_AnnotateVideoResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p3beta1_AnnotateVideoResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse.class,
              com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse.Builder.class);
    }
    // Construct using
    // com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse.newBuilder()
    private Builder() {}
    // Parented variant used for nested-builder change propagation.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (annotationResultsBuilder_ == null) {
        // Plain-list mode: revert to the shared empty list.
        annotationResults_ = java.util.Collections.emptyList();
      } else {
        // Builder mode: the nested builder stays authoritative; just empty it.
        annotationResults_ = null;
        annotationResultsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      // Instance hook over the same static descriptor.
      return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p3beta1_AnnotateVideoResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse
        getDefaultInstanceForType() {
      return com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse build() {
      com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse result = buildPartial();
      // Unlike buildPartial(), build() rejects messages missing required fields
      // (a no-op here, since this message has none).
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse buildPartial() {
      com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse result =
          new com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse result) {
      if (annotationResultsBuilder_ == null) {
        // Freeze the list exactly once; subsequent builder mutations copy-on-write.
        if (((bitField0_ & 0x00000001) != 0)) {
          annotationResults_ = java.util.Collections.unmodifiableList(annotationResults_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.annotationResults_ = annotationResults_;
      } else {
        result.annotationResults_ = annotationResultsBuilder_.build();
      }
    }
    private void buildPartial0(
        com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse result) {
      // Generated scaffold: no singular fields to copy for this message.
      int from_bitField0_ = bitField0_;
    }
    // GeneratedMessageV3.Builder plumbing: pure delegation to super, retained so the
    // return type is narrowed to this concrete Builder for fluent chaining.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Route to the type-specific merge when possible; otherwise fall back to the
      // reflective field-by-field merge in the superclass.
      if (other instanceof com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse) {
        return mergeFrom(
            (com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse other) {
      // Merging from the default (empty) instance is a no-op.
      if (other
          == com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse
              .getDefaultInstance()) return this;
      if (annotationResultsBuilder_ == null) {
        // Plain-list mode: adopt other's immutable list when ours is empty,
        // otherwise copy-on-write append.
        if (!other.annotationResults_.isEmpty()) {
          if (annotationResults_.isEmpty()) {
            annotationResults_ = other.annotationResults_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureAnnotationResultsIsMutable();
            annotationResults_.addAll(other.annotationResults_);
          }
          onChanged();
        }
      } else {
        // Nested-builder mode: an empty builder is discarded in favor of other's
        // list; a non-empty one has other's messages appended.
        if (!other.annotationResults_.isEmpty()) {
          if (annotationResultsBuilder_.isEmpty()) {
            annotationResultsBuilder_.dispose();
            annotationResultsBuilder_ = null;
            annotationResults_ = other.annotationResults_;
            bitField0_ = (bitField0_ & ~0x00000001);
            annotationResultsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getAnnotationResultsFieldBuilder()
                    : null;
          } else {
            annotationResultsBuilder_.addAllMessages(other.annotationResults_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // This message declares no required fields, so any state is valid.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of input.
              done = true;
              break;
            case 10:
              {
                // Tag 10 = field 1 (annotation_results), wire type 2 (length-delimited).
                com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults m =
                    input.readMessage(
                        com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults
                            .parser(),
                        extensionRegistry);
                if (annotationResultsBuilder_ == null) {
                  ensureAnnotationResultsIsMutable();
                  annotationResults_.add(m);
                } else {
                  annotationResultsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                // Unrecognized tags are preserved as unknown fields; false means endgroup.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Mark the builder dirty even if parsing stopped partway through.
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001 set means annotationResults_ is a private mutable copy.
    private int bitField0_;
    private java.util.List<com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults>
        annotationResults_ = java.util.Collections.emptyList();
    // Copy-on-write: replace the possibly-shared/immutable list with a private
    // ArrayList before the first mutation.
    private void ensureAnnotationResultsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        annotationResults_ =
            new java.util.ArrayList<
                com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults>(
                annotationResults_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily created; once non-null it (not annotationResults_) is the source of truth.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults,
            com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.Builder,
            com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResultsOrBuilder>
        annotationResultsBuilder_;
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public java.util.List<com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults>
getAnnotationResultsList() {
if (annotationResultsBuilder_ == null) {
return java.util.Collections.unmodifiableList(annotationResults_);
} else {
return annotationResultsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public int getAnnotationResultsCount() {
if (annotationResultsBuilder_ == null) {
return annotationResults_.size();
} else {
return annotationResultsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults getAnnotationResults(
int index) {
if (annotationResultsBuilder_ == null) {
return annotationResults_.get(index);
} else {
return annotationResultsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder setAnnotationResults(
int index, com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults value) {
if (annotationResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAnnotationResultsIsMutable();
annotationResults_.set(index, value);
onChanged();
} else {
annotationResultsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder setAnnotationResults(
int index,
com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.Builder
builderForValue) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
annotationResults_.set(index, builderForValue.build());
onChanged();
} else {
annotationResultsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAnnotationResults(
com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults value) {
if (annotationResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAnnotationResultsIsMutable();
annotationResults_.add(value);
onChanged();
} else {
annotationResultsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAnnotationResults(
int index, com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults value) {
if (annotationResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAnnotationResultsIsMutable();
annotationResults_.add(index, value);
onChanged();
} else {
annotationResultsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAnnotationResults(
com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.Builder
builderForValue) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
annotationResults_.add(builderForValue.build());
onChanged();
} else {
annotationResultsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAnnotationResults(
int index,
com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.Builder
builderForValue) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
annotationResults_.add(index, builderForValue.build());
onChanged();
} else {
annotationResultsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAllAnnotationResults(
java.lang.Iterable<
? extends com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults>
values) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, annotationResults_);
onChanged();
} else {
annotationResultsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder clearAnnotationResults() {
if (annotationResultsBuilder_ == null) {
annotationResults_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
annotationResultsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder removeAnnotationResults(int index) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
annotationResults_.remove(index);
onChanged();
} else {
annotationResultsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.Builder
getAnnotationResultsBuilder(int index) {
return getAnnotationResultsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResultsOrBuilder
getAnnotationResultsOrBuilder(int index) {
if (annotationResultsBuilder_ == null) {
return annotationResults_.get(index);
} else {
return annotationResultsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public java.util.List<
? extends com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResultsOrBuilder>
getAnnotationResultsOrBuilderList() {
if (annotationResultsBuilder_ != null) {
return annotationResultsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(annotationResults_);
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.Builder
addAnnotationResultsBuilder() {
return getAnnotationResultsFieldBuilder()
.addBuilder(
com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults
.getDefaultInstance());
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.Builder
addAnnotationResultsBuilder(int index) {
return getAnnotationResultsFieldBuilder()
.addBuilder(
index,
com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults
.getDefaultInstance());
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public java.util.List<
com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.Builder>
getAnnotationResultsBuilderList() {
return getAnnotationResultsFieldBuilder().getBuilderList();
}
    // Lazily instantiates the RepeatedFieldBuilderV3 and hands it ownership of
    // the current list contents; annotationResults_ is nulled afterwards so the
    // field builder is the single source of truth from then on.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults,
            com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.Builder,
            com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResultsOrBuilder>
        getAnnotationResultsFieldBuilder() {
      if (annotationResultsBuilder_ == null) {
        annotationResultsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults,
                com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.Builder,
                com.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResultsOrBuilder>(
                annotationResults_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        annotationResults_ = null;
      }
      return annotationResultsBuilder_;
    }
    // Plain delegations to GeneratedMessageV3.Builder; declared final so this
    // generated builder's unknown-field handling cannot be overridden.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse)
}
  // @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse)
  // Singleton default (all-fields-empty) instance, created eagerly at class
  // initialization time.
  private static final com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse();
  }

  public static com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stream parser backing all parseFrom() overloads. Every failure path is
  // converted to InvalidProtocolBufferException with the partially-parsed
  // message attached, so callers can inspect what was decoded before the error.
  private static final com.google.protobuf.Parser<AnnotateVideoResponse> PARSER =
      new com.google.protobuf.AbstractParser<AnnotateVideoResponse>() {
        @java.lang.Override
        public AnnotateVideoResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and per-instance accessors for the shared parser and default instance.
  public static com.google.protobuf.Parser<AnnotateVideoResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<AnnotateVideoResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== File boundary (extraction artifact removed). The content below is
// java-tasks/proto-google-cloud-tasks-v2/src/main/java/com/google/cloud/tasks/v2/ListQueuesResponse.java
// from the googleapis/google-cloud-java repository. ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/tasks/v2/cloudtasks.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.tasks.v2;
/**
*
*
* <pre>
* Response message for
* [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues].
* </pre>
*
* Protobuf type {@code google.cloud.tasks.v2.ListQueuesResponse}
*/
public final class ListQueuesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.tasks.v2.ListQueuesResponse)
ListQueuesResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListQueuesResponse.newBuilder() to construct.
  private ListQueuesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor: initializes field defaults (empty list, empty token).
  private ListQueuesResponse() {
    queues_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  // Runtime hook used by the protobuf library to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListQueuesResponse();
  }
  // Descriptor plumbing: ties this class to its message definition in
  // google/cloud/tasks/v2/cloudtasks.proto via the generated CloudTasksProto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.tasks.v2.CloudTasksProto
        .internal_static_google_cloud_tasks_v2_ListQueuesResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.tasks.v2.CloudTasksProto
        .internal_static_google_cloud_tasks_v2_ListQueuesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.tasks.v2.ListQueuesResponse.class,
            com.google.cloud.tasks.v2.ListQueuesResponse.Builder.class);
  }
  public static final int QUEUES_FIELD_NUMBER = 1;

  // Repeated queues field; populated by the builder (frozen via
  // unmodifiableList in buildPartialRepeatedFields before being shared).
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.tasks.v2.Queue> queues_;
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.tasks.v2.Queue> getQueuesList() {
return queues_;
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.tasks.v2.QueueOrBuilder>
getQueuesOrBuilderList() {
return queues_;
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
@java.lang.Override
public int getQueuesCount() {
return queues_.size();
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
@java.lang.Override
public com.google.cloud.tasks.v2.Queue getQueues(int index) {
return queues_.get(index);
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
@java.lang.Override
public com.google.cloud.tasks.v2.QueueOrBuilder getQueuesOrBuilder(int index) {
return queues_.get(index);
}
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Holds either a String or the undecoded wire ByteString; decoded lazily.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token to retrieve next page of results.
   *
   * To return the next page of results, call
   * [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] with this value
   * as the [page_token][google.cloud.tasks.v2.ListQueuesRequest.page_token].
   *
   * If the next_page_token is empty, there are no more results.
   *
   * The page token is valid for only 2 hours.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field still holds the wire ByteString: decode the UTF-8 bytes once and
      // cache the resulting String back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token to retrieve next page of results.
   *
   * To return the next page of results, call
   * [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] with this value
   * as the [page_token][google.cloud.tasks.v2.ListQueuesRequest.page_token].
   *
   * If the next_page_token is empty, there are no more results.
   *
   * The page token is valid for only 2 hours.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Field holds the decoded String: encode to UTF-8 once and cache the
      // resulting ByteString back into the field.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization check: -1 = not yet computed, 1 = initialized,
  // 0 = not initialized. With no required fields the answer is always true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes fields in tag order: repeated queues as field 1, the page token
  // as field 2 (skipped when empty), then any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < queues_.size(); i++) {
      output.writeMessage(1, queues_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and caches in memoizedSize; -1 means uncomputed) the exact byte
  // count writeTo() will produce; mirrors writeTo() field-for-field.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < queues_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, queues_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over queues, next_page_token, and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.tasks.v2.ListQueuesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.tasks.v2.ListQueuesResponse other =
        (com.google.cloud.tasks.v2.ListQueuesResponse) obj;
    if (!getQueuesList().equals(other.getQueuesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash consistent with equals(), seeded with the descriptor hash and cached
  // in memoizedHashCode (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getQueuesCount() > 0) {
      hash = (37 * hash) + QUEUES_FIELD_NUMBER;
      hash = (53 * hash) + getQueuesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.tasks.v2.ListQueuesResponse parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.tasks.v2.ListQueuesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.tasks.v2.ListQueuesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.tasks.v2.ListQueuesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.tasks.v2.ListQueuesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.tasks.v2.ListQueuesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.tasks.v2.ListQueuesResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.tasks.v2.ListQueuesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.tasks.v2.ListQueuesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.tasks.v2.ListQueuesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.tasks.v2.ListQueuesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.tasks.v2.ListQueuesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
  // Builder factory methods. toBuilder() on the default instance hands out a
  // plain new Builder; otherwise the builder is pre-populated via mergeFrom.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.tasks.v2.ListQueuesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for
* [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues].
* </pre>
*
* Protobuf type {@code google.cloud.tasks.v2.ListQueuesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.tasks.v2.ListQueuesResponse)
com.google.cloud.tasks.v2.ListQueuesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.tasks.v2.CloudTasksProto
.internal_static_google_cloud_tasks_v2_ListQueuesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.tasks.v2.CloudTasksProto
.internal_static_google_cloud_tasks_v2_ListQueuesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.tasks.v2.ListQueuesResponse.class,
com.google.cloud.tasks.v2.ListQueuesResponse.Builder.class);
}
// Construct using com.google.cloud.tasks.v2.ListQueuesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    // Resets every field to its default and clears the has-bits. The queues
    // field either reverts to the shared empty list or delegates to the
    // nested field builder's clear().
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (queuesBuilder_ == null) {
        queues_ = java.util.Collections.emptyList();
      } else {
        queues_ = null;
        queuesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.tasks.v2.CloudTasksProto
.internal_static_google_cloud_tasks_v2_ListQueuesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.tasks.v2.ListQueuesResponse getDefaultInstanceForType() {
return com.google.cloud.tasks.v2.ListQueuesResponse.getDefaultInstance();
}
    // Builds the message, failing with UninitializedMessageException if it is
    // not initialized (never happens here: no required fields).
    @java.lang.Override
    public com.google.cloud.tasks.v2.ListQueuesResponse build() {
      com.google.cloud.tasks.v2.ListQueuesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Assembles the message without the initialization check.
    @java.lang.Override
    public com.google.cloud.tasks.v2.ListQueuesResponse buildPartial() {
      com.google.cloud.tasks.v2.ListQueuesResponse result =
          new com.google.cloud.tasks.v2.ListQueuesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Moves the repeated queues field into the result. When built from the
    // plain list, the list is frozen (unmodifiableList) and the mutable bit
    // cleared so builder and message can share it safely.
    private void buildPartialRepeatedFields(com.google.cloud.tasks.v2.ListQueuesResponse result) {
      if (queuesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          queues_ = java.util.Collections.unmodifiableList(queues_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.queues_ = queues_;
      } else {
        result.queues_ = queuesBuilder_.build();
      }
    }

    // Copies singular fields whose has-bit is set (bit 0x2 = next_page_token).
    private void buildPartial0(com.google.cloud.tasks.v2.ListQueuesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.tasks.v2.ListQueuesResponse) {
return mergeFrom((com.google.cloud.tasks.v2.ListQueuesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges another ListQueuesResponse into this builder: queues are
    // concatenated (existing elements first), a non-empty incoming page token
    // replaces the current one, and unknown fields are merged.
    public Builder mergeFrom(com.google.cloud.tasks.v2.ListQueuesResponse other) {
      if (other == com.google.cloud.tasks.v2.ListQueuesResponse.getDefaultInstance()) return this;
      if (queuesBuilder_ == null) {
        if (!other.queues_.isEmpty()) {
          if (queues_.isEmpty()) {
            // Adopt the other message's list directly; clearing the mutable
            // bit forces a copy before any subsequent mutation.
            queues_ = other.queues_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureQueuesIsMutable();
            queues_.addAll(other.queues_);
          }
          onChanged();
        }
      } else {
        if (!other.queues_.isEmpty()) {
          if (queuesBuilder_.isEmpty()) {
            // Standard generated-code sequence: discard the empty nested
            // builder, adopt the incoming list, then re-create the builder
            // only when alwaysUseFieldBuilders is set.
            queuesBuilder_.dispose();
            queuesBuilder_ = null;
            queues_ = other.queues_;
            bitField0_ = (bitField0_ & ~0x00000001);
            queuesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getQueuesFieldBuilder()
                    : null;
          } else {
            queuesBuilder_.addAllMessages(other.queues_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // Always true: this message declares no required fields.
      return true;
    }

    // Parses serialized ListQueuesResponse data from the stream and merges it
    // into this builder. Tag 10 (field 1) = repeated Queue messages; tag 18
    // (field 2) = the next_page_token string; anything else is preserved as an
    // unknown field. onChanged() runs in a finally block so listeners are
    // notified even when parsing fails partway through.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0: // end of stream
              done = true;
              break;
            case 10:
              {
                com.google.cloud.tasks.v2.Queue m =
                    input.readMessage(com.google.cloud.tasks.v2.Queue.parser(), extensionRegistry);
                if (queuesBuilder_ == null) {
                  ensureQueuesIsMutable();
                  queues_.add(m);
                } else {
                  queuesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    // Backing list for the repeated queues field. Starts as the shared
    // immutable empty list; bit 0x1 of bitField0_ records whether it has been
    // copied into a private mutable ArrayList.
    private java.util.List<com.google.cloud.tasks.v2.Queue> queues_ =
        java.util.Collections.emptyList();

    // Copy-on-first-write before any mutation of queues_.
    private void ensureQueuesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        queues_ = new java.util.ArrayList<com.google.cloud.tasks.v2.Queue>(queues_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily-created nested-builder support for the repeated field; while this
    // is non-null it owns the field state and queues_ is unused.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.tasks.v2.Queue,
            com.google.cloud.tasks.v2.Queue.Builder,
            com.google.cloud.tasks.v2.QueueOrBuilder>
        queuesBuilder_;
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public java.util.List<com.google.cloud.tasks.v2.Queue> getQueuesList() {
if (queuesBuilder_ == null) {
return java.util.Collections.unmodifiableList(queues_);
} else {
return queuesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public int getQueuesCount() {
if (queuesBuilder_ == null) {
return queues_.size();
} else {
return queuesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public com.google.cloud.tasks.v2.Queue getQueues(int index) {
if (queuesBuilder_ == null) {
return queues_.get(index);
} else {
return queuesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public Builder setQueues(int index, com.google.cloud.tasks.v2.Queue value) {
if (queuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureQueuesIsMutable();
queues_.set(index, value);
onChanged();
} else {
queuesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public Builder setQueues(int index, com.google.cloud.tasks.v2.Queue.Builder builderForValue) {
if (queuesBuilder_ == null) {
ensureQueuesIsMutable();
queues_.set(index, builderForValue.build());
onChanged();
} else {
queuesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public Builder addQueues(com.google.cloud.tasks.v2.Queue value) {
if (queuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureQueuesIsMutable();
queues_.add(value);
onChanged();
} else {
queuesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public Builder addQueues(int index, com.google.cloud.tasks.v2.Queue value) {
if (queuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureQueuesIsMutable();
queues_.add(index, value);
onChanged();
} else {
queuesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public Builder addQueues(com.google.cloud.tasks.v2.Queue.Builder builderForValue) {
if (queuesBuilder_ == null) {
ensureQueuesIsMutable();
queues_.add(builderForValue.build());
onChanged();
} else {
queuesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public Builder addQueues(int index, com.google.cloud.tasks.v2.Queue.Builder builderForValue) {
if (queuesBuilder_ == null) {
ensureQueuesIsMutable();
queues_.add(index, builderForValue.build());
onChanged();
} else {
queuesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public Builder addAllQueues(
java.lang.Iterable<? extends com.google.cloud.tasks.v2.Queue> values) {
if (queuesBuilder_ == null) {
ensureQueuesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, queues_);
onChanged();
} else {
queuesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public Builder clearQueues() {
if (queuesBuilder_ == null) {
queues_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
queuesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public Builder removeQueues(int index) {
if (queuesBuilder_ == null) {
ensureQueuesIsMutable();
queues_.remove(index);
onChanged();
} else {
queuesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public com.google.cloud.tasks.v2.Queue.Builder getQueuesBuilder(int index) {
return getQueuesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public com.google.cloud.tasks.v2.QueueOrBuilder getQueuesOrBuilder(int index) {
if (queuesBuilder_ == null) {
return queues_.get(index);
} else {
return queuesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public java.util.List<? extends com.google.cloud.tasks.v2.QueueOrBuilder>
getQueuesOrBuilderList() {
if (queuesBuilder_ != null) {
return queuesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(queues_);
}
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public com.google.cloud.tasks.v2.Queue.Builder addQueuesBuilder() {
return getQueuesFieldBuilder()
.addBuilder(com.google.cloud.tasks.v2.Queue.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public com.google.cloud.tasks.v2.Queue.Builder addQueuesBuilder(int index) {
return getQueuesFieldBuilder()
.addBuilder(index, com.google.cloud.tasks.v2.Queue.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of queues.
* </pre>
*
* <code>repeated .google.cloud.tasks.v2.Queue queues = 1;</code>
*/
public java.util.List<com.google.cloud.tasks.v2.Queue.Builder> getQueuesBuilderList() {
return getQueuesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.tasks.v2.Queue,
com.google.cloud.tasks.v2.Queue.Builder,
com.google.cloud.tasks.v2.QueueOrBuilder>
getQueuesFieldBuilder() {
if (queuesBuilder_ == null) {
queuesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.tasks.v2.Queue,
com.google.cloud.tasks.v2.Queue.Builder,
com.google.cloud.tasks.v2.QueueOrBuilder>(
queues_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
queues_ = null;
}
return queuesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve next page of results.
*
* To return the next page of results, call
* [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] with this value
* as the [page_token][google.cloud.tasks.v2.ListQueuesRequest.page_token].
*
* If the next_page_token is empty, there are no more results.
*
* The page token is valid for only 2 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
*
* To return the next page of results, call
* [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] with this value
* as the [page_token][google.cloud.tasks.v2.ListQueuesRequest.page_token].
*
* If the next_page_token is empty, there are no more results.
*
* The page token is valid for only 2 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
*
* To return the next page of results, call
* [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] with this value
* as the [page_token][google.cloud.tasks.v2.ListQueuesRequest.page_token].
*
* If the next_page_token is empty, there are no more results.
*
* The page token is valid for only 2 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
*
* To return the next page of results, call
* [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] with this value
* as the [page_token][google.cloud.tasks.v2.ListQueuesRequest.page_token].
*
* If the next_page_token is empty, there are no more results.
*
* The page token is valid for only 2 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
*
* To return the next page of results, call
* [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] with this value
* as the [page_token][google.cloud.tasks.v2.ListQueuesRequest.page_token].
*
* If the next_page_token is empty, there are no more results.
*
* The page token is valid for only 2 hours.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
    // Standard pass-throughs to the generated-message base class for unknown
    // (unrecognized) wire fields carried alongside the known ones.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.tasks.v2.ListQueuesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.ListQueuesResponse)
  // Shared immutable default (all-fields-unset) instance of this message.
  private static final com.google.cloud.tasks.v2.ListQueuesResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.tasks.v2.ListQueuesResponse();
  }

  public static com.google.cloud.tasks.v2.ListQueuesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Singleton parser. parsePartialFrom attaches the partially-built message to
  // any InvalidProtocolBufferException it throws so callers can inspect what
  // was successfully read before the failure.
  private static final com.google.protobuf.Parser<ListQueuesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListQueuesResponse>() {
        @java.lang.Override
        public ListQueuesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so the API surface stays protobuf-typed.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListQueuesResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListQueuesResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.tasks.v2.ListQueuesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== File boundary (dataset artifact repaired): next file is
// java-gkehub/proto-google-cloud-gkehub-v1beta/src/main/java/com/google/cloud/gkehub/v1beta/ListFeaturesResponse.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gkehub/v1beta/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gkehub.v1beta;
/**
*
*
* <pre>
* Response message for the `GkeHub.ListFeatures` method.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.v1beta.ListFeaturesResponse}
*/
public final class ListFeaturesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.gkehub.v1beta.ListFeaturesResponse)
ListFeaturesResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListFeaturesResponse.newBuilder() to construct.
  private ListFeaturesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; initializes the repeated
  // and string fields to their empty defaults.
  private ListFeaturesResponse() {
    resources_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  // Reflective instantiation hook used by the protobuf runtime.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListFeaturesResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.gkehub.v1beta.ServiceProto
        .internal_static_google_cloud_gkehub_v1beta_ListFeaturesResponse_descriptor;
  }

  // Binds the generated accessors to descriptor fields for reflection support.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.gkehub.v1beta.ServiceProto
        .internal_static_google_cloud_gkehub_v1beta_ListFeaturesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.gkehub.v1beta.ListFeaturesResponse.class,
            com.google.cloud.gkehub.v1beta.ListFeaturesResponse.Builder.class);
  }

  public static final int RESOURCES_FIELD_NUMBER = 1;

  // Immutable once the message is built; set by the Builder.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.gkehub.v1beta.Feature> resources_;
  /**
   * The list of matching Features.
   *
   * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.gkehub.v1beta.Feature> getResourcesList() {
    return resources_;
  }

  /**
   * The list of matching Features, as OrBuilder views.
   *
   * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.gkehub.v1beta.FeatureOrBuilder>
      getResourcesOrBuilderList() {
    return resources_;
  }

  /**
   * Number of matching Features.
   *
   * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
   */
  @java.lang.Override
  public int getResourcesCount() {
    return resources_.size();
  }

  /**
   * The Feature at {@code index}.
   *
   * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.gkehub.v1beta.Feature getResources(int index) {
    return resources_.get(index);
  }

  /**
   * OrBuilder view of the Feature at {@code index}.
   *
   * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.gkehub.v1beta.FeatureOrBuilder getResourcesOrBuilder(int index) {
    return resources_.get(index);
  }
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to request the next page of resources from the
* `ListFeatures` method. The value of an empty string means
* that there are no more resources to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token to request the next page of resources from the
* `ListFeatures` method. The value of an empty string means
* that there are no more resources to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized initialization state: -1 = unknown, 0 = not initialized, 1 = ok.
  private byte memoizedIsInitialized = -1;

  // This message has no required fields, so once computed the answer is
  // always true.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < resources_.size(); i++) {
output.writeMessage(1, resources_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < resources_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, resources_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.gkehub.v1beta.ListFeaturesResponse)) {
return super.equals(obj);
}
com.google.cloud.gkehub.v1beta.ListFeaturesResponse other =
(com.google.cloud.gkehub.v1beta.ListFeaturesResponse) obj;
if (!getResourcesList().equals(other.getResourcesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  // Memoized hash consistent with equals(): descriptor, then each populated
  // field folded in with the standard 37/53 protobuf mixing constants.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getResourcesCount() > 0) {
      // Repeated fields only contribute when non-empty.
      hash = (37 * hash) + RESOURCES_FIELD_NUMBER;
      hash = (53 * hash) + getResourcesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Static parsing entry points. All overloads delegate to PARSER (optionally
  // via GeneratedMessageV3's IOException-translating helpers) and differ only
  // in input source and whether an extension registry is supplied.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.gkehub.v1beta.ListFeaturesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gkehub.v1beta.ListFeaturesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gkehub.v1beta.ListFeaturesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gkehub.v1beta.ListFeaturesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gkehub.v1beta.ListFeaturesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gkehub.v1beta.ListFeaturesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gkehub.v1beta.ListFeaturesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.gkehub.v1beta.ListFeaturesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants first read a varint length prefix, enabling multiple
  // messages per stream.
  public static com.google.cloud.gkehub.v1beta.ListFeaturesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.gkehub.v1beta.ListFeaturesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.gkehub.v1beta.ListFeaturesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.gkehub.v1beta.ListFeaturesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.gkehub.v1beta.ListFeaturesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for the `GkeHub.ListFeatures` method.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.v1beta.ListFeaturesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.gkehub.v1beta.ListFeaturesResponse)
com.google.cloud.gkehub.v1beta.ListFeaturesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.gkehub.v1beta.ServiceProto
          .internal_static_google_cloud_gkehub_v1beta_ListFeaturesResponse_descriptor;
    }

    // Binds the generated accessors to descriptor fields for reflection support.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.gkehub.v1beta.ServiceProto
          .internal_static_google_cloud_gkehub_v1beta_ListFeaturesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.gkehub.v1beta.ListFeaturesResponse.class,
              com.google.cloud.gkehub.v1beta.ListFeaturesResponse.Builder.class);
    }

    // Construct using com.google.cloud.gkehub.v1beta.ListFeaturesResponse.newBuilder()
    private Builder() {}

    // Parent-aware constructor used when this builder is nested inside another.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its default and drops any pending repeated-field
    // builder state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (resourcesBuilder_ == null) {
        resources_ = java.util.Collections.emptyList();
      } else {
        // The field builder owns the list; clear it and null the local ref.
        resources_ = null;
        resourcesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.gkehub.v1beta.ServiceProto
.internal_static_google_cloud_gkehub_v1beta_ListFeaturesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta.ListFeaturesResponse getDefaultInstanceForType() {
return com.google.cloud.gkehub.v1beta.ListFeaturesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta.ListFeaturesResponse build() {
com.google.cloud.gkehub.v1beta.ListFeaturesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    // Assembles a message from the builder's current state. Repeated fields
    // are finalized first, then scalar fields guarded by bitField0_ are copied.
    @java.lang.Override
    public com.google.cloud.gkehub.v1beta.ListFeaturesResponse buildPartial() {
      com.google.cloud.gkehub.v1beta.ListFeaturesResponse result =
          new com.google.cloud.gkehub.v1beta.ListFeaturesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Freezes resources_ into an unmodifiable list (clearing the mutable bit
    // so later builder mutations copy first), or takes the built list from the
    // nested field builder when one exists.
    private void buildPartialRepeatedFields(
        com.google.cloud.gkehub.v1beta.ListFeaturesResponse result) {
      if (resourcesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          resources_ = java.util.Collections.unmodifiableList(resources_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.resources_ = resources_;
      } else {
        result.resources_ = resourcesBuilder_.build();
      }
    }

    // Copies optional scalar fields that were explicitly set
    // (bit 1: next_page_token).
    private void buildPartial0(com.google.cloud.gkehub.v1beta.ListFeaturesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // Boilerplate delegations to the base builder; overridden only to narrow
    // the return type to this Builder for fluent chaining.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.gkehub.v1beta.ListFeaturesResponse) {
return mergeFrom((com.google.cloud.gkehub.v1beta.ListFeaturesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges another message's fields into this builder. For the repeated
    // field, an empty local list adopts the other message's (immutable) list
    // by reference; otherwise elements are appended. The field-builder branch
    // mirrors this, disposing and re-creating the builder when it was empty.
    public Builder mergeFrom(com.google.cloud.gkehub.v1beta.ListFeaturesResponse other) {
      if (other == com.google.cloud.gkehub.v1beta.ListFeaturesResponse.getDefaultInstance())
        return this;
      if (resourcesBuilder_ == null) {
        if (!other.resources_.isEmpty()) {
          if (resources_.isEmpty()) {
            // Adopt by reference; the mutable bit stays clear so a later
            // mutation will copy first.
            resources_ = other.resources_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureResourcesIsMutable();
            resources_.addAll(other.resources_);
          }
          onChanged();
        }
      } else {
        if (!other.resources_.isEmpty()) {
          if (resourcesBuilder_.isEmpty()) {
            resourcesBuilder_.dispose();
            resourcesBuilder_ = null;
            resources_ = other.resources_;
            bitField0_ = (bitField0_ & ~0x00000001);
            // alwaysUseFieldBuilders is a debug/test flag; normally the
            // builder stays null until next requested.
            resourcesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getResourcesFieldBuilder()
                    : null;
          } else {
            resourcesBuilder_.addAllMessages(other.resources_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required fields in this message, so a builder is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge: reads tags until end-of-stream (tag 0) or an
    // end-group tag, dispatching on field number/wire type. Tag 10 =
    // field 1 length-delimited (resources); tag 18 = field 2
    // length-delimited (next_page_token). Unknown fields are preserved.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.gkehub.v1beta.Feature m =
                    input.readMessage(
                        com.google.cloud.gkehub.v1beta.Feature.parser(), extensionRegistry);
                if (resourcesBuilder_ == null) {
                  ensureResourcesIsMutable();
                  resources_.add(m);
                } else {
                  resourcesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure so partial state is observed.
        onChanged();
      } // finally
      return this;
    }
    // Bit 0: resources_ is a private mutable copy (copy-on-write flag).
    // Bit 1: next_page_token has been explicitly set on this builder.
    private int bitField0_;

    // Backing list for the repeated `resources` field; starts as the shared
    // immutable empty list and is copied before the first in-place mutation.
    private java.util.List<com.google.cloud.gkehub.v1beta.Feature> resources_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: swaps in a private ArrayList copy (and sets bit 0)
    // so mutations never touch a list shared with a built message.
    private void ensureResourcesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        resources_ = new java.util.ArrayList<com.google.cloud.gkehub.v1beta.Feature>(resources_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily-created nested builder support for `resources`; once created it
    // owns the elements and resources_ is nulled out.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.gkehub.v1beta.Feature,
            com.google.cloud.gkehub.v1beta.Feature.Builder,
            com.google.cloud.gkehub.v1beta.FeatureOrBuilder>
        resourcesBuilder_;
/**
*
*
* <pre>
* The list of matching Features
* </pre>
*
* <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
*/
public java.util.List<com.google.cloud.gkehub.v1beta.Feature> getResourcesList() {
if (resourcesBuilder_ == null) {
return java.util.Collections.unmodifiableList(resources_);
} else {
return resourcesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of matching Features
* </pre>
*
* <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
*/
public int getResourcesCount() {
if (resourcesBuilder_ == null) {
return resources_.size();
} else {
return resourcesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of matching Features
* </pre>
*
* <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
*/
public com.google.cloud.gkehub.v1beta.Feature getResources(int index) {
if (resourcesBuilder_ == null) {
return resources_.get(index);
} else {
return resourcesBuilder_.getMessage(index);
}
}
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @param index position of the element to replace
     * @param value replacement Feature (must not be null)
     * @return this builder for chaining
     */
    public Builder setResources(int index, com.google.cloud.gkehub.v1beta.Feature value) {
      if (resourcesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureResourcesIsMutable();
        resources_.set(index, value);
        onChanged();
      } else {
        resourcesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @param index position of the element to replace
     * @param builderForValue builder whose built message replaces the element
     * @return this builder for chaining
     */
    public Builder setResources(
        int index, com.google.cloud.gkehub.v1beta.Feature.Builder builderForValue) {
      if (resourcesBuilder_ == null) {
        ensureResourcesIsMutable();
        resources_.set(index, builderForValue.build());
        onChanged();
      } else {
        resourcesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @param value Feature to append (must not be null)
     * @return this builder for chaining
     */
    public Builder addResources(com.google.cloud.gkehub.v1beta.Feature value) {
      if (resourcesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureResourcesIsMutable();
        resources_.add(value);
        onChanged();
      } else {
        resourcesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @param index position at which to insert
     * @param value Feature to insert (must not be null)
     * @return this builder for chaining
     */
    public Builder addResources(int index, com.google.cloud.gkehub.v1beta.Feature value) {
      if (resourcesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureResourcesIsMutable();
        resources_.add(index, value);
        onChanged();
      } else {
        resourcesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @param builderForValue builder whose built message is appended
     * @return this builder for chaining
     */
    public Builder addResources(com.google.cloud.gkehub.v1beta.Feature.Builder builderForValue) {
      if (resourcesBuilder_ == null) {
        ensureResourcesIsMutable();
        resources_.add(builderForValue.build());
        onChanged();
      } else {
        resourcesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @param index position at which to insert
     * @param builderForValue builder whose built message is inserted
     * @return this builder for chaining
     */
    public Builder addResources(
        int index, com.google.cloud.gkehub.v1beta.Feature.Builder builderForValue) {
      if (resourcesBuilder_ == null) {
        ensureResourcesIsMutable();
        resources_.add(index, builderForValue.build());
        onChanged();
      } else {
        resourcesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @param values Features to append in iteration order
     * @return this builder for chaining
     */
    public Builder addAllResources(
        java.lang.Iterable<? extends com.google.cloud.gkehub.v1beta.Feature> values) {
      if (resourcesBuilder_ == null) {
        ensureResourcesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, resources_);
        onChanged();
      } else {
        resourcesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @return this builder for chaining
     */
    public Builder clearResources() {
      if (resourcesBuilder_ == null) {
        resources_ = java.util.Collections.emptyList();
        // Clear the has-resources bit so build() emits an empty field.
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        resourcesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @param index position of the element to remove
     * @return this builder for chaining
     */
    public Builder removeResources(int index) {
      if (resourcesBuilder_ == null) {
        ensureResourcesIsMutable();
        resources_.remove(index);
        onChanged();
      } else {
        resourcesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @param index position of the element to edit
     * @return a nested builder backed by the element at {@code index}
     */
    public com.google.cloud.gkehub.v1beta.Feature.Builder getResourcesBuilder(int index) {
      return getResourcesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @param index position of the element to read
     * @return the element as a message or its live builder view
     */
    public com.google.cloud.gkehub.v1beta.FeatureOrBuilder getResourcesOrBuilder(int index) {
      if (resourcesBuilder_ == null) {
        return resources_.get(index);
      } else {
        return resourcesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @return an unmodifiable view of the current elements
     */
    public java.util.List<? extends com.google.cloud.gkehub.v1beta.FeatureOrBuilder>
        getResourcesOrBuilderList() {
      if (resourcesBuilder_ != null) {
        return resourcesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(resources_);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @return a builder for a new element appended to the field
     */
    public com.google.cloud.gkehub.v1beta.Feature.Builder addResourcesBuilder() {
      return getResourcesFieldBuilder()
          .addBuilder(com.google.cloud.gkehub.v1beta.Feature.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @param index position at which the new element is inserted
     * @return a builder for the newly inserted element
     */
    public com.google.cloud.gkehub.v1beta.Feature.Builder addResourcesBuilder(int index) {
      return getResourcesFieldBuilder()
          .addBuilder(index, com.google.cloud.gkehub.v1beta.Feature.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1beta.Feature resources = 1;</code>
     *
     * @return builders for every element of the repeated field
     */
    public java.util.List<com.google.cloud.gkehub.v1beta.Feature.Builder>
        getResourcesBuilderList() {
      return getResourcesFieldBuilder().getBuilderList();
    }

    // Lazily switches from list-backed to builder-backed storage: the
    // plain list is handed off to the RepeatedFieldBuilderV3 and nulled,
    // so exactly one representation is live at any time.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.gkehub.v1beta.Feature,
            com.google.cloud.gkehub.v1beta.Feature.Builder,
            com.google.cloud.gkehub.v1beta.FeatureOrBuilder>
        getResourcesFieldBuilder() {
      if (resourcesBuilder_ == null) {
        resourcesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.gkehub.v1beta.Feature,
                com.google.cloud.gkehub.v1beta.Feature.Builder,
                com.google.cloud.gkehub.v1beta.FeatureOrBuilder>(
                resources_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        resources_ = null;
      }
      return resourcesBuilder_;
    }
    // Holds either a String or a ByteString; decoded lazily and cached by
    // the accessors below (standard protobuf string-field representation).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token to request the next page of resources from the
     * `ListFeatures` method. The value of an empty string means
     * that there are no more resources to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so later calls avoid re-decoding.
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token to request the next page of resources from the
     * `ListFeatures` method. The value of an empty string means
     * that there are no more resources to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString for subsequent byte-level reads.
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token to request the next page of resources from the
     * `ListFeatures` method. The value of an empty string means
     * that there are no more resources to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Mark the field as explicitly set.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token to request the next page of resources from the
     * `ListFeatures` method. The value of an empty string means
     * that there are no more resources to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      // Revert to the default ("") and clear the has-bit.
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token to request the next page of resources from the
     * `ListFeatures` method. The value of an empty string means
     * that there are no more resources to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; reject bad input early.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Unknown-field handling is delegated unchanged to the generated
    // superclass; these overrides only narrow the return type to Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.gkehub.v1beta.ListFeaturesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.gkehub.v1beta.ListFeaturesResponse)
  // Eagerly-created singleton holding all default field values for this
  // message type; shared by every empty ListFeaturesResponse.
  private static final com.google.cloud.gkehub.v1beta.ListFeaturesResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.gkehub.v1beta.ListFeaturesResponse();
  }

  /** Returns the shared immutable default instance of this message. */
  public static com.google.cloud.gkehub.v1beta.ListFeaturesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser shared by all callers; delegates to Builder.mergeFrom
  // and attaches the partially-built message to any parse failure.
  private static final com.google.protobuf.Parser<ListFeaturesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListFeaturesResponse>() {
        @java.lang.Override
        public ListFeaturesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Preserve whatever was parsed so far for diagnostics.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<ListFeaturesResponse> parser() {
    return PARSER;
  }
  /** {@inheritDoc} Returns the shared {@link #parser()} instance. */
  @java.lang.Override
  public com.google.protobuf.Parser<ListFeaturesResponse> getParserForType() {
    return PARSER;
  }

  /** {@inheritDoc} Returns the shared {@link #getDefaultInstance()}. */
  @java.lang.Override
  public com.google.cloud.gkehub.v1beta.ListFeaturesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
openjdk/jdk8 | 36,334 | jdk/src/share/classes/javax/swing/BufferStrategyPaintManager.java | /*
* Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.swing;
import java.awt.*;
import java.awt.event.*;
import java.awt.image.*;
import java.lang.reflect.*;
import java.lang.ref.WeakReference;
import java.util.*;
import com.sun.java.swing.SwingUtilities3;
import sun.awt.SubRegionShowable;
import sun.java2d.SunGraphics2D;
import sun.java2d.pipe.hw.ExtendedBufferCapabilities;
import sun.awt.SunToolkit;
import sun.util.logging.PlatformLogger;
/**
* A PaintManager implementation that uses a BufferStrategy for
* rendering.
*
* @author Scott Violet
*/
class BufferStrategyPaintManager extends RepaintManager.PaintManager {
//
// All drawing is done to a BufferStrategy. At the end of painting
// (endPaint) the region that was painted is flushed to the screen
// (using BufferStrategy.show).
//
// PaintManager.show is overriden to show directly from the
// BufferStrategy (when using blit), if successful true is
// returned and a paint event will not be generated. To avoid
// showing from the buffer while painting a locking scheme is
// implemented. When beginPaint is invoked the field painting is
// set to true. If painting is true and show is invoked we
// immediately return false. This is done to avoid blocking the
// toolkit thread while painting happens. In a similar way when
// show is invoked the field showing is set to true, beginPaint
// will then block until showing is true. This scheme ensures we
// only ever have one thread using the BufferStrategy and it also
// ensures the toolkit thread remains as responsive as possible.
//
// If we're using a flip strategy the contents of the backbuffer may
// have changed and so show only attempts to show from the backbuffer
// if we get a blit strategy.
//
//
// Methods used to create BufferStrategy for Applets.
//
    // Reflective handles to package-private Component methods, resolved
    // lazily by getMethods(); used only for Applet roots.
    private static Method COMPONENT_CREATE_BUFFER_STRATEGY_METHOD;
    private static Method COMPONENT_GET_BUFFER_STRATEGY_METHOD;

    private static final PlatformLogger LOGGER = PlatformLogger.getLogger(
                           "javax.swing.BufferStrategyPaintManager");

    /**
     * List of BufferInfos. We don't use a Map primarily because
     * there are typically only a handful of top level components making
     * a Map overkill.
     */
    private ArrayList<BufferInfo> bufferInfos;

    /**
     * Indicates <code>beginPaint</code> has been invoked. This is
     * set to true for the life of beginPaint/endPaint pair.
     */
    private boolean painting;
    /**
     * Indicates we're in the process of showing. All painting, on the EDT,
     * is blocked while this is true.
     */
    private boolean showing;

    //
    // Region that we need to flush. When beginPaint is called these are
    // reset and any subsequent calls to paint/copyArea then update these
    // fields accordingly. When endPaint is called we then try and show
    // the accumulated region.
    // These fields are in the coordinate system of the root.
    // accumulatedX == Integer.MAX_VALUE encodes "empty region"
    // (see resetAccumulated/flushAccumulatedRegion).
    //
    private int accumulatedX;
    private int accumulatedY;
    private int accumulatedMaxX;
    private int accumulatedMaxY;

    //
    // The following fields are set by prepare
    //

    /**
     * Farthest JComponent ancestor for the current paint/copyArea.
     */
    private JComponent rootJ;
    /**
     * Location of component being painted relative to root.
     */
    private int xOffset;
    /**
     * Location of component being painted relative to root.
     */
    private int yOffset;
    /**
     * Graphics from the BufferStrategy.
     */
    private Graphics bsg;
    /**
     * BufferStrategy currently being used.
     */
    private BufferStrategy bufferStrategy;
    /**
     * BufferInfo corresponding to root.
     */
    private BufferInfo bufferInfo;

    /**
     * Set to true if the bufferInfo needs to be disposed when current
     * paint loop is done.
     */
    private boolean disposeBufferOnEnd;
private static Method getGetBufferStrategyMethod() {
if (COMPONENT_GET_BUFFER_STRATEGY_METHOD == null) {
getMethods();
}
return COMPONENT_GET_BUFFER_STRATEGY_METHOD;
}
private static Method getCreateBufferStrategyMethod() {
if (COMPONENT_CREATE_BUFFER_STRATEGY_METHOD == null) {
getMethods();
}
return COMPONENT_CREATE_BUFFER_STRATEGY_METHOD;
}
    /**
     * Resolves, via reflection inside a privileged action, the
     * package-private {@code Component.createBufferStrategy(int,
     * BufferCapabilities)} and {@code Component.getBufferStrategy()}
     * methods.  These are needed for Applet roots, which do not expose
     * the public Window buffer-strategy API.
     */
    private static void getMethods() {
        java.security.AccessController.doPrivileged(
                            new java.security.PrivilegedAction<Object>() {
            public Object run() {
                try {
                    COMPONENT_CREATE_BUFFER_STRATEGY_METHOD = Component.class.
                              getDeclaredMethod("createBufferStrategy",
                                                new Class[] { int.class,
                                              BufferCapabilities.class });
                    COMPONENT_CREATE_BUFFER_STRATEGY_METHOD.
                                            setAccessible(true);
                    COMPONENT_GET_BUFFER_STRATEGY_METHOD = Component.class.
                              getDeclaredMethod("getBufferStrategy");
                    COMPONENT_GET_BUFFER_STRATEGY_METHOD.setAccessible(true);
                } catch (SecurityException e) {
                    // Both methods exist on Component; failing here would
                    // indicate a broken security configuration.
                    assert false;
                } catch (NoSuchMethodException nsme) {
                    assert false;
                }
                return null;
            }
        });
    }
BufferStrategyPaintManager() {
bufferInfos = new ArrayList<BufferInfo>(1);
}
//
// PaintManager methods
//
    /**
     * Cleans up any created BufferStrategies.
     */
    protected void dispose() {
        // dispose can be invoked at any random time. To avoid
        // threading dependencies we do the actual disposing via an
        // invokeLater.
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                java.util.List<BufferInfo> bufferInfos;
                synchronized(BufferStrategyPaintManager.this) {
                    // Wait for any in-flight show() on the toolkit thread;
                    // the BufferStrategy must not be disposed while in use.
                    while (showing) {
                        try {
                            BufferStrategyPaintManager.this.wait();
                        } catch (InterruptedException ie) {
                        }
                    }
                    // Steal the list under the lock so no other code path
                    // can see or dispose these entries again.
                    bufferInfos = BufferStrategyPaintManager.this.bufferInfos;
                    BufferStrategyPaintManager.this.bufferInfos = null;
                }
                dispose(bufferInfos);
            }
        });
    }
private void dispose(java.util.List<BufferInfo> bufferInfos) {
if (LOGGER.isLoggable(PlatformLogger.Level.FINER)) {
LOGGER.finer("BufferStrategyPaintManager disposed",
new RuntimeException());
}
if (bufferInfos != null) {
for (BufferInfo bufferInfo : bufferInfos) {
bufferInfo.dispose();
}
}
}
/**
* Shows the specified region of the back buffer. This will return
* true if successful, false otherwise. This is invoked on the
* toolkit thread in response to an expose event.
*/
public boolean show(Container c, int x, int y, int w, int h) {
synchronized(this) {
if (painting) {
// Don't show from backbuffer while in the process of
// painting.
return false;
}
showing = true;
}
try {
BufferInfo info = getBufferInfo(c);
BufferStrategy bufferStrategy;
if (info != null && info.isInSync() &&
(bufferStrategy = info.getBufferStrategy(false)) != null) {
SubRegionShowable bsSubRegion =
(SubRegionShowable)bufferStrategy;
boolean paintAllOnExpose = info.getPaintAllOnExpose();
info.setPaintAllOnExpose(false);
if (bsSubRegion.showIfNotLost(x, y, (x + w), (y + h))) {
return !paintAllOnExpose;
}
// Mark the buffer as needing to be repainted. We don't
// immediately do a repaint as this method will return false
// indicating a PaintEvent should be generated which will
// trigger a complete repaint.
bufferInfo.setContentsLostDuringExpose(true);
}
}
finally {
synchronized(this) {
showing = false;
notifyAll();
}
}
return false;
}
    /**
     * Paints {@code paintingComponent} into the back buffer when per-window
     * buffering is usable, accumulating the painted region for the flush in
     * {@code endPaint}.  Falls back to the superclass (per-component
     * double buffering) when the root is invalid or the Graphics does not
     * target the root's surface.
     *
     * @return true if the paint went to the back buffer, false if the
     *         superclass handled it
     */
    public boolean paint(JComponent paintingComponent,
                         JComponent bufferComponent, Graphics g,
                         int x, int y, int w, int h) {
        Container root = fetchRoot(paintingComponent);

        if (prepare(paintingComponent, root, true, x, y, w, h)) {
            if ((g instanceof SunGraphics2D) &&
                    ((SunGraphics2D)g).getDestination() == root) {
                // BufferStrategy may have already constrained the Graphics. To
                // account for that we revert the constrain, then apply a
                // constrain for Swing on top of that.
                int cx = ((SunGraphics2D)bsg).constrainX;
                int cy = ((SunGraphics2D)bsg).constrainY;
                if (cx != 0 || cy != 0) {
                    bsg.translate(-cx, -cy);
                }
                ((SunGraphics2D)bsg).constrain(xOffset + cx, yOffset + cy,
                                               x + w, y + h);
                bsg.setClip(x, y, w, h);
                paintingComponent.paintToOffscreen(bsg, x, y, w, h,
                                                   x + w, y + h);
                // Record the painted rect (root coordinates) for endPaint.
                accumulate(xOffset + x, yOffset + y, w, h);
                return true;
            } else {
                // Assume they are going to eventually render to the screen.
                // This disables showing from backbuffer until a complete
                // repaint occurs.
                bufferInfo.setInSync(false);
                // Fall through to old rendering.
            }
        }

        // Invalid root, do what Swing has always done.
        if (LOGGER.isLoggable(PlatformLogger.Level.FINER)) {
            LOGGER.finer("prepare failed");
        }
        return super.paint(paintingComponent, bufferComponent, g, x, y, w, h);
    }
    /**
     * Scrolls a region by {@code deltaX}/{@code deltaY} using the back
     * buffer when it is in sync; otherwise flushes pending paints and
     * copies on screen, or forces a full repaint if the flush fails.
     */
    public void copyArea(JComponent c, Graphics g, int x, int y, int w, int h,
                         int deltaX, int deltaY, boolean clip) {
        // Note: this method is only called internally and we know that
        // g is from a heavyweight Component, so no check is necessary as
        // it is in paint() above.
        //
        // If the buffer isn't in sync there is no point in doing a copyArea,
        // it has garbage.
        Container root = fetchRoot(c);

        if (prepare(c, root, false, 0, 0, 0, 0) && bufferInfo.isInSync()) {
            if (clip) {
                Rectangle cBounds = c.getVisibleRect();
                int relX = xOffset + x;
                int relY = yOffset + y;
                // Restrict the blit to the component's visible rect,
                // translated into root coordinates.
                bsg.clipRect(xOffset + cBounds.x,
                             yOffset + cBounds.y,
                             cBounds.width, cBounds.height);
                bsg.copyArea(relX, relY, w, h, deltaX, deltaY);
            }
            else {
                bsg.copyArea(xOffset + x, yOffset + y, w, h, deltaX,
                             deltaY);
            }
            // The destination rect (source shifted by delta) is now dirty.
            accumulate(x + xOffset + deltaX, y + yOffset + deltaY, w, h);
        } else {
            if (LOGGER.isLoggable(PlatformLogger.Level.FINER)) {
                LOGGER.finer("copyArea: prepare failed or not in sync");
            }
            // Prepare failed, or not in sync. By calling super.copyArea
            // we'll copy on screen. We need to flush any pending paint to
            // the screen otherwise we'll do a copyArea on the wrong thing.
            if (!flushAccumulatedRegion()) {
                // Flush failed, copyArea will be copying garbage,
                // force repaint of all.
                rootJ.repaint();
            } else {
                super.copyArea(c, g, x, y, w, h, deltaX, deltaY, clip);
            }
        }
    }
    /**
     * Marks the start of an EDT paint cycle: sets {@code painting} so the
     * toolkit thread refuses to show from the back buffer, and blocks
     * until any in-progress show() completes, guaranteeing single-threaded
     * access to the BufferStrategy.
     */
    public void beginPaint() {
        synchronized(this) {
            painting = true;
            // Make sure another thread isn't attempting to show from
            // the back buffer.
            while(showing) {
                try {
                    // Woken by notifyAll() in show()'s finally block.
                    wait();
                } catch (InterruptedException ie) {
                }
            }
        }
        if (LOGGER.isLoggable(PlatformLogger.Level.FINEST)) {
            LOGGER.finest("beginPaint");
        }
        // Reset the area that needs to be painted.
        resetAccumulated();
    }
    /**
     * Ends the EDT paint cycle started by {@code beginPaint}: flushes the
     * accumulated region to the screen, releases the paint lock, and
     * disposes the current buffer if {@code doubleBufferingChanged0}
     * requested it mid-paint.
     */
    public void endPaint() {
        if (LOGGER.isLoggable(PlatformLogger.Level.FINEST)) {
            LOGGER.finest("endPaint: region " + accumulatedX + " " +
                          accumulatedY + " " + accumulatedMaxX + " " +
                          accumulatedMaxY);
        }
        if (painting) {
            if (!flushAccumulatedRegion()) {
                // Flush failed (contents lost): repaint the root so the
                // back buffer can be brought back in sync.
                if (!isRepaintingRoot()) {
                    repaintRoot(rootJ);
                }
                else {
                    // Contents lost twice in a row, punt.
                    resetDoubleBufferPerWindow();
                    // In case we've left junk on the screen, force a repaint.
                    rootJ.repaint();
                }
            }
        }

        BufferInfo toDispose = null;
        synchronized(this) {
            painting = false;
            if (disposeBufferOnEnd) {
                disposeBufferOnEnd = false;
                toDispose = bufferInfo;
                bufferInfos.remove(toDispose);
            }
        }
        if (toDispose != null) {
            // Dispose outside the lock; it unregisters listeners.
            toDispose.dispose();
        }
    }
    /**
     * Renders the BufferStrategy to the screen.
     *
     * @return true if successful, false otherwise.
     */
    private boolean flushAccumulatedRegion() {
        boolean success = true;
        // accumulatedX == Integer.MAX_VALUE means nothing was painted.
        if (accumulatedX != Integer.MAX_VALUE) {
            SubRegionShowable bsSubRegion = (SubRegionShowable)bufferStrategy;
            boolean contentsLost = bufferStrategy.contentsLost();
            if (!contentsLost) {
                bsSubRegion.show(accumulatedX, accumulatedY,
                                 accumulatedMaxX, accumulatedMaxY);
                // Re-check: contents may have been lost during show().
                contentsLost = bufferStrategy.contentsLost();
            }
            if (contentsLost) {
                if (LOGGER.isLoggable(PlatformLogger.Level.FINER)) {
                    LOGGER.finer("endPaint: contents lost");
                }
                // Shown region was bogus, mark buffer as out of sync.
                bufferInfo.setInSync(false);
                success = false;
            }
        }
        resetAccumulated();
        return success;
    }
private void resetAccumulated() {
accumulatedX = Integer.MAX_VALUE;
accumulatedY = Integer.MAX_VALUE;
accumulatedMaxX = 0;
accumulatedMaxY = 0;
}
/**
* Invoked when the double buffering or useTrueDoubleBuffering
* changes for a JRootPane. If the rootpane is not double
* buffered, or true double buffering changes we throw out any
* cache we may have.
*/
public void doubleBufferingChanged(final JRootPane rootPane) {
if ((!rootPane.isDoubleBuffered() ||
!rootPane.getUseTrueDoubleBuffering()) &&
rootPane.getParent() != null) {
if (!SwingUtilities.isEventDispatchThread()) {
Runnable updater = new Runnable() {
public void run() {
doubleBufferingChanged0(rootPane);
}
};
SwingUtilities.invokeLater(updater);
}
else {
doubleBufferingChanged0(rootPane);
}
}
}
    /**
     * Does the work for doubleBufferingChanged: removes and disposes the
     * cached BufferInfo for the rootpane's window.  Runs only on the EDT.
     */
    private void doubleBufferingChanged0(JRootPane rootPane) {
        // This will only happen on the EDT.
        BufferInfo info;
        synchronized(this) {
            // Make sure another thread isn't attempting to show from
            // the back buffer.
            while(showing) {
                try {
                    wait();
                } catch (InterruptedException ie) {
                }
            }
            info = getBufferInfo(rootPane.getParent());
            if (painting && bufferInfo == info) {
                // We're in the process of painting and the user grabbed
                // the Graphics. If we dispose now, endPaint will attempt
                // to show a bogus BufferStrategy. Set a flag so that
                // endPaint knows it needs to dispose this buffer.
                disposeBufferOnEnd = true;
                info = null;
            } else if (info != null) {
                bufferInfos.remove(info);
            }
        }
        if (info != null) {
            // Dispose outside the lock; it unregisters listeners and
            // releases the BufferStrategy.
            info.dispose();
        }
    }
    /**
     * Calculates information common to paint/copyArea: locates (or
     * creates) the BufferInfo for {@code root}, obtains its
     * BufferStrategy and draw Graphics ({@code bsg}), and tracks whether
     * buffer contents were lost so the root can be repainted.
     *
     * @param c       the component being painted/scrolled
     * @param root    result of fetchRoot, may be null
     * @param isPaint true when called from paint, false from copyArea
     * @return true if should use buffering per window in painting.
     */
    private boolean prepare(JComponent c, Container root, boolean isPaint, int x, int y,
                            int w, int h) {
        if (bsg != null) {
            // Release the Graphics from the previous prepare.
            bsg.dispose();
            bsg = null;
        }
        bufferStrategy = null;
        if (root != null) {
            boolean contentsLost = false;
            BufferInfo bufferInfo = getBufferInfo(root);
            if (bufferInfo == null) {
                // First paint for this window: a fresh buffer has no
                // valid contents yet.
                contentsLost = true;
                bufferInfo = new BufferInfo(root);
                bufferInfos.add(bufferInfo);
                if (LOGGER.isLoggable(PlatformLogger.Level.FINER)) {
                    LOGGER.finer("prepare: new BufferInfo: " + root);
                }
            }
            this.bufferInfo = bufferInfo;
            if (!bufferInfo.hasBufferStrategyChanged()) {
                bufferStrategy = bufferInfo.getBufferStrategy(true);
                if (bufferStrategy != null) {
                    bsg = bufferStrategy.getDrawGraphics();
                    if (bufferStrategy.contentsRestored()) {
                        contentsLost = true;
                        if (LOGGER.isLoggable(PlatformLogger.Level.FINER)) {
                            LOGGER.finer("prepare: contents restored in prepare");
                        }
                    }
                }
                else {
                    // Couldn't create BufferStrategy, fallback to normal
                    // painting.
                    return false;
                }
                if (bufferInfo.getContentsLostDuringExpose()) {
                    contentsLost = true;
                    bufferInfo.setContentsLostDuringExpose(false);
                    if (LOGGER.isLoggable(PlatformLogger.Level.FINER)) {
                        LOGGER.finer("prepare: contents lost on expose");
                    }
                }
                if (isPaint && c == rootJ && x == 0 && y == 0 &&
                      c.getWidth() == w && c.getHeight() == h) {
                    // A full paint of the root puts the back buffer in sync.
                    bufferInfo.setInSync(true);
                }
                else if (contentsLost) {
                    // We either recreated the BufferStrategy, or the contents
                    // of the buffer strategy were restored. We need to
                    // repaint the root pane so that the back buffer is in sync
                    // again.
                    bufferInfo.setInSync(false);
                    if (!isRepaintingRoot()) {
                        repaintRoot(rootJ);
                    }
                    else {
                        // Contents lost twice in a row, punt
                        resetDoubleBufferPerWindow();
                    }
                }
                // resetDoubleBufferPerWindow() nulls bufferInfos, in which
                // case per-window buffering is no longer usable.
                return (bufferInfos != null);
            }
        }
        return false;
    }
    /**
     * Walks up from {@code c} to find the Window/Applet root usable for
     * per-window buffering, setting {@code rootJ} (farthest JComponent
     * ancestor) and {@code xOffset}/{@code yOffset} (c's location in root
     * coordinates) as side effects.
     *
     * @return the root, or null if true double buffering must not be used
     */
    private Container fetchRoot(JComponent c) {
        boolean encounteredHW = false;
        rootJ = c;
        Container root = c;
        xOffset = yOffset = 0;
        while (root != null &&
               (!(root instanceof Window) &&
                !SunToolkit.isInstanceOf(root, "java.applet.Applet"))) {
            xOffset += root.getX();
            yOffset += root.getY();
            root = root.getParent();
            if (root != null) {
                if (root instanceof JComponent) {
                    rootJ = (JComponent)root;
                }
                else if (!root.isLightweight()) {
                    if (!encounteredHW) {
                        encounteredHW = true;
                    }
                    else {
                        // We've encountered two hws now and may have
                        // a containment hierarchy with lightweights containing
                        // heavyweights containing other lightweights.
                        // Heavyweights poke holes in lightweight
                        // rendering so that if we call show on the BS
                        // (which is associated with the Window) you will
                        // not see the contents over any child
                        // heavyweights.  If we didn't do this when we
                        // went to show the descendants of the nested hw
                        // you would see nothing, so, we bail out here.
                        return null;
                    }
                }
            }
        }
        if ((root instanceof RootPaneContainer) &&
            (rootJ instanceof JRootPane)) {
            // We're in a Swing heavyweight (JFrame/JWindow...), use double
            // buffering if double buffering enabled on the JRootPane and
            // the JRootPane wants true double buffering.
            if (rootJ.isDoubleBuffered() &&
                    ((JRootPane)rootJ).getUseTrueDoubleBuffering()) {
                // Whether or not a component is double buffered is a
                // bit tricky with Swing. This gives a good approximation
                // of the various ways to turn on double buffering for
                // components.
                return root;
            }
        }
        // Don't do true double buffering.
        return null;
    }
/**
* Turns off double buffering per window.
*/
private void resetDoubleBufferPerWindow() {
if (bufferInfos != null) {
dispose(bufferInfos);
bufferInfos = null;
repaintManager.setPaintManager(null);
}
}
/**
* Returns the BufferInfo for the specified root or null if one
* hasn't been created yet.
*/
private BufferInfo getBufferInfo(Container root) {
for (int counter = bufferInfos.size() - 1; counter >= 0; counter--) {
BufferInfo bufferInfo = bufferInfos.get(counter);
Container biRoot = bufferInfo.getRoot();
if (biRoot == null) {
// Window gc'ed
bufferInfos.remove(counter);
if (LOGGER.isLoggable(PlatformLogger.Level.FINER)) {
LOGGER.finer("BufferInfo pruned, root null");
}
}
else if (biRoot == root) {
return bufferInfo;
}
}
return null;
}
private void accumulate(int x, int y, int w, int h) {
accumulatedX = Math.min(x, accumulatedX);
accumulatedY = Math.min(y, accumulatedY);
accumulatedMaxX = Math.max(accumulatedMaxX, x + w);
accumulatedMaxY = Math.max(accumulatedMaxY, y + h);
}
/**
* BufferInfo is used to track the BufferStrategy being used for
* a particular Component. In addition to tracking the BufferStrategy
* it will install a WindowListener and ComponentListener. When the
* component is hidden/iconified the buffer is marked as needing to be
* completely repainted.
*/
private class BufferInfo extends ComponentAdapter implements
WindowListener {
        // NOTE: This class does NOT hold a direct reference to the root, if it
        // did there would be a cycle between the BufferPerWindowPaintManager
        // and the Window so that it could never be GC'ed
        //
        // Reference to BufferStrategy is referenced via WeakReference for
        // same reason.
        private WeakReference<BufferStrategy> weakBS;
        private WeakReference<Container> root;
        // Indicates whether or not the backbuffer and display are in sync.
        // This is set to true when a full repaint on the rootpane is done.
        private boolean inSync;
        // Indicates the contents were lost during an expose event.
        private boolean contentsLostDuringExpose;
        // Indicates we need to generate a paint event on expose.
        private boolean paintAllOnExpose;
public BufferInfo(Container root) {
this.root = new WeakReference<Container>(root);
root.addComponentListener(this);
if (root instanceof Window) {
((Window)root).addWindowListener(this);
}
}
        /** Sets whether a full paint event must be generated on the next expose. */
        public void setPaintAllOnExpose(boolean paintAllOnExpose) {
            this.paintAllOnExpose = paintAllOnExpose;
        }

        /** Returns whether a full paint event must be generated on the next expose. */
        public boolean getPaintAllOnExpose() {
            return paintAllOnExpose;
        }

        /** Records that the buffer contents were lost during an expose. */
        public void setContentsLostDuringExpose(boolean value) {
            contentsLostDuringExpose = value;
        }

        /** Returns whether the buffer contents were lost during an expose. */
        public boolean getContentsLostDuringExpose() {
            return contentsLostDuringExpose;
        }

        /** Sets whether the back buffer matches what is on screen. */
        public void setInSync(boolean inSync) {
            this.inSync = inSync;
        }

        /**
         * Whether or not the contents of the buffer strategy
         * is in sync with the window. This is set to true when the root
         * pane paints all, and false when contents are lost/restored.
         */
        public boolean isInSync() {
            return inSync;
        }

        /**
         * Returns the Root (Window or Applet) that this BufferInfo references.
         */
        public Container getRoot() {
            return (root == null) ? null : root.get();
        }
/**
* Returns the BufferStartegy. This will return null if
* the BufferStartegy hasn't been created and <code>create</code> is
* false, or if there is a problem in creating the
* <code>BufferStartegy</code>.
*
* @param create If true, and the BufferStartegy is currently null,
* one will be created.
*/
public BufferStrategy getBufferStrategy(boolean create) {
BufferStrategy bs = (weakBS == null) ? null : weakBS.get();
if (bs == null && create) {
bs = createBufferStrategy();
if (bs != null) {
weakBS = new WeakReference<BufferStrategy>(bs);
}
if (LOGGER.isLoggable(PlatformLogger.Level.FINER)) {
LOGGER.finer("getBufferStrategy: created bs: " + bs);
}
}
return bs;
}
        /**
         * Returns true if the buffer strategy of the component differs
         * from current buffer strategy (someone else created a strategy on
         * the same root); our stale strategy is disposed in that case.
         */
        public boolean hasBufferStrategyChanged() {
            Container root = getRoot();
            if (root != null) {
                BufferStrategy ourBS = null;
                BufferStrategy componentBS = null;

                ourBS = getBufferStrategy(false);
                if (root instanceof Window) {
                    componentBS = ((Window)root).getBufferStrategy();
                }
                else {
                    // Applet root: reach the package-private getter
                    // reflectively.
                    try {
                        componentBS = (BufferStrategy)
                                 getGetBufferStrategyMethod().invoke(root);
                    } catch (InvocationTargetException ite) {
                        assert false;
                    } catch (IllegalArgumentException iae) {
                        assert false;
                    } catch (IllegalAccessException iae2) {
                        assert false;
                    }
                }
                if (componentBS != ourBS) {
                    // Component has a different BS, dispose ours.
                    if (ourBS != null) {
                        ourBS.dispose();
                    }
                    weakBS = null;
                    return true;
                }
            }
            return false;
        }
        /**
         * Creates the BufferStrategy.  If the appropriate system property
         * has been set we'll try for flip first and then we'll try for
         * blit.  Returns null if the root was gc'ed or no usable
         * (SubRegionShowable) strategy could be created.
         */
        private BufferStrategy createBufferStrategy() {
            Container root = getRoot();
            if (root == null) {
                return null;
            }
            BufferStrategy bs = null;
            if (SwingUtilities3.isVsyncRequested(root)) {
                // Try a vsynced (flip) strategy first when requested.
                bs = createBufferStrategy(root, true);
                if (LOGGER.isLoggable(PlatformLogger.Level.FINER)) {
                    LOGGER.finer("createBufferStrategy: using vsynced strategy");
                }
            }
            if (bs == null) {
                bs = createBufferStrategy(root, false);
            }
            if (!(bs instanceof SubRegionShowable)) {
                // We do this for two reasons:
                // 1. So that we know we can cast to SubRegionShowable and
                //    invoke show with the minimal region to update
                // 2. To avoid the possibility of invoking client code
                //    on the toolkit thread.
                bs = null;
            }
            return bs;
        }
// Creates and returns a buffer strategy. If
// there is a problem creating the buffer strategy this will
// eat the exception and return null.
private BufferStrategy createBufferStrategy(Container root,
boolean isVsynced) {
BufferCapabilities caps;
if (isVsynced) {
caps = new ExtendedBufferCapabilities(
new ImageCapabilities(true), new ImageCapabilities(true),
BufferCapabilities.FlipContents.COPIED,
ExtendedBufferCapabilities.VSyncType.VSYNC_ON);
} else {
caps = new BufferCapabilities(
new ImageCapabilities(true), new ImageCapabilities(true),
null);
}
BufferStrategy bs = null;
if (SunToolkit.isInstanceOf(root, "java.applet.Applet")) {
try {
getCreateBufferStrategyMethod().invoke(root, 2, caps);
bs = (BufferStrategy)getGetBufferStrategyMethod().
invoke(root);
} catch (InvocationTargetException ite) {
// Type is not supported
if (LOGGER.isLoggable(PlatformLogger.Level.FINER)) {
LOGGER.finer("createBufferStratety failed",
ite);
}
} catch (IllegalArgumentException iae) {
assert false;
} catch (IllegalAccessException iae2) {
assert false;
}
}
else {
try {
((Window)root).createBufferStrategy(2, caps);
bs = ((Window)root).getBufferStrategy();
} catch (AWTException e) {
// Type not supported
if (LOGGER.isLoggable(PlatformLogger.Level.FINER)) {
LOGGER.finer("createBufferStratety failed",
e);
}
}
}
return bs;
}
/**
* Cleans up and removes any references.
*/
public void dispose() {
Container root = getRoot();
if (LOGGER.isLoggable(PlatformLogger.Level.FINER)) {
LOGGER.finer("disposed BufferInfo for: " + root);
}
if (root != null) {
root.removeComponentListener(this);
if (root instanceof Window) {
((Window)root).removeWindowListener(this);
}
BufferStrategy bs = getBufferStrategy(false);
if (bs != null) {
bs.dispose();
}
}
this.root = null;
weakBS = null;
}
// We mark the buffer as needing to be painted on a hide/iconify
// because the developer may have conditionalized painting based on
// visibility.
// Ideally we would also move to having the BufferStrategy being
// a SoftReference in Component here, but that requires changes to
// Component and the like.
public void componentHidden(ComponentEvent e) {
Container root = getRoot();
if (root != null && root.isVisible()) {
// This case will only happen if a developer calls
// hide immediately followed by show. In this case
// the event is delivered after show and the window
// will still be visible. If a developer altered the
// contents of the window between the hide/show
// invocations we won't recognize we need to paint and
// the contents would be bogus. Calling repaint here
// fixs everything up.
root.repaint();
}
else {
setPaintAllOnExpose(true);
}
}
        // Iconifying hides the contents, so force a full repaint when the
        // window is next exposed (see the componentHidden rationale).
        public void windowIconified(WindowEvent e) {
            setPaintAllOnExpose(true);
        }
        // On a dispose we chuck everything.
        public void windowClosed(WindowEvent e) {
            // Make sure we're not showing.
            // Block until any in-progress show has finished (showing is
            // cleared and waiters notified under the same monitor), then
            // remove this BufferInfo from the tracked list.
            synchronized(BufferStrategyPaintManager.this) {
                while (showing) {
                    try {
                        BufferStrategyPaintManager.this.wait();
                    } catch (InterruptedException ie) {
                        // Ignored: keep waiting until showing clears.
                    }
                }
                bufferInfos.remove(this);
            }
            dispose();
        }
        // The remaining WindowListener methods are intentionally no-ops;
        // only iconify/close affect buffer state.
        public void windowOpened(WindowEvent e) {
        }
        public void windowClosing(WindowEvent e) {
        }
        public void windowDeiconified(WindowEvent e) {
        }
        public void windowActivated(WindowEvent e) {
        }
        public void windowDeactivated(WindowEvent e) {
        }
}
}
|
googleapis/google-cloud-java | 36,079 | java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/CollectionItem.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/visionai/v1/warehouse.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.visionai.v1;
/**
*
*
* <pre>
* A CollectionItem is an item in a collection.
* Each item is a reference to the original resource in a collection.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.CollectionItem}
*/
public final class CollectionItem extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.CollectionItem)
CollectionItemOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CollectionItem.newBuilder() to construct.
  private CollectionItem(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance; initializes string
  // and enum fields to their proto3 defaults.
  private CollectionItem() {
    collection_ = "";
    type_ = 0;
    itemResource_ = "";
  }
  // Invoked reflectively by the protobuf runtime to create new instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CollectionItem();
  }
  /** Returns the protobuf descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.visionai.v1.WarehouseProto
        .internal_static_google_cloud_visionai_v1_CollectionItem_descriptor;
  }
  // Binds the generated field accessor table to this class and its Builder.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.visionai.v1.WarehouseProto
        .internal_static_google_cloud_visionai_v1_CollectionItem_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.visionai.v1.CollectionItem.class,
            com.google.cloud.visionai.v1.CollectionItem.Builder.class);
  }
  /**
   *
   *
   * <pre>
   * CollectionItem types.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.visionai.v1.CollectionItem.Type}
   */
  public enum Type implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * The default type of item should never happen.
     * </pre>
     *
     * <code>TYPE_UNSPECIFIED = 0;</code>
     */
    TYPE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Asset type item.
     * </pre>
     *
     * <code>ASSET = 1;</code>
     */
    ASSET(1),
    // Sentinel for wire values not known to this version of the generated
    // code; it has no valid number or descriptor.
    UNRECOGNIZED(-1),
    ;
    /**
     *
     *
     * <pre>
     * The default type of item should never happen.
     * </pre>
     *
     * <code>TYPE_UNSPECIFIED = 0;</code>
     */
    public static final int TYPE_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * Asset type item.
     * </pre>
     *
     * <code>ASSET = 1;</code>
     */
    public static final int ASSET_VALUE = 1;
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static Type valueOf(int value) {
      return forNumber(value);
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or
     *     null if the value is unknown.
     */
    public static Type forNumber(int value) {
      switch (value) {
        case 0:
          return TYPE_UNSPECIFIED;
        case 1:
          return ASSET;
        default:
          return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<Type> internalGetValueMap() {
      return internalValueMap;
    }
    // Lookup table used by the runtime to map wire numbers to constants.
    private static final com.google.protobuf.Internal.EnumLiteMap<Type> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<Type>() {
          public Type findValueByNumber(int number) {
            return Type.forNumber(number);
          }
        };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.visionai.v1.CollectionItem.getDescriptor().getEnumTypes().get(0);
    }
    private static final Type[] VALUES = values();
    public static Type valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    // The proto wire number of this constant (-1 for UNRECOGNIZED).
    private final int value;
    private Type(int value) {
      this.value = value;
    }
    // @@protoc_insertion_point(enum_scope:google.cloud.visionai.v1.CollectionItem.Type)
  }
  public static final int COLLECTION_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString; the UTF-8 decode is
  // performed lazily and the decoded String is cached back into the field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object collection_ = "";
  /**
   *
   *
   * <pre>
   * Required. The collection name that this item belongs to. Format:
   * `projects/{project_number}/locations/{location}/corpora/{corpus}/collections/{collection}`
   * </pre>
   *
   * <code>
   * string collection = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The collection.
   */
  @java.lang.Override
  public java.lang.String getCollection() {
    java.lang.Object ref = collection_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded value so later calls skip the UTF-8 decode.
      collection_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The collection name that this item belongs to. Format:
   * `projects/{project_number}/locations/{location}/corpora/{corpus}/collections/{collection}`
   * </pre>
   *
   * <code>
   * string collection = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for collection.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getCollectionBytes() {
    java.lang.Object ref = collection_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded bytes, mirroring the String caching above.
      collection_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int TYPE_FIELD_NUMBER = 2;
  // Stored as the raw wire number so unknown enum values round-trip.
  private int type_ = 0;
  /**
   *
   *
   * <pre>
   * Required. The type of item.
   * </pre>
   *
   * <code>
   * .google.cloud.visionai.v1.CollectionItem.Type type = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The enum numeric value on the wire for type.
   */
  @java.lang.Override
  public int getTypeValue() {
    return type_;
  }
  /**
   *
   *
   * <pre>
   * Required. The type of item.
   * </pre>
   *
   * <code>
   * .google.cloud.visionai.v1.CollectionItem.Type type = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The type, or {@code UNRECOGNIZED} if the wire value is unknown.
   */
  @java.lang.Override
  public com.google.cloud.visionai.v1.CollectionItem.Type getType() {
    com.google.cloud.visionai.v1.CollectionItem.Type result =
        com.google.cloud.visionai.v1.CollectionItem.Type.forNumber(type_);
    return result == null ? com.google.cloud.visionai.v1.CollectionItem.Type.UNRECOGNIZED : result;
  }
  public static final int ITEM_RESOURCE_FIELD_NUMBER = 3;
  // Holds either a java.lang.String or a ByteString; decoded lazily and
  // cached, same scheme as collection_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object itemResource_ = "";
  /**
   *
   *
   * <pre>
   * Required. The name of the CollectionItem. Its format depends on the `type`
   * above. For ASSET:
   * `projects/{project_number}/locations/{location}/corpora/{corpus}/assets/{asset}`
   * </pre>
   *
   * <code>string item_resource = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The itemResource.
   */
  @java.lang.Override
  public java.lang.String getItemResource() {
    java.lang.Object ref = itemResource_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded value so later calls skip the UTF-8 decode.
      itemResource_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the CollectionItem. Its format depends on the `type`
   * above. For ASSET:
   * `projects/{project_number}/locations/{location}/corpora/{corpus}/assets/{asset}`
   * </pre>
   *
   * <code>string item_resource = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for itemResource.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getItemResourceBytes() {
    java.lang.Object ref = itemResource_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded bytes, mirroring the String caching above.
      itemResource_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  // This proto3 message has no fields that can be uninitialized, so the
  // check always memoizes and returns true.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes this message; fields holding their proto3 default values
  // (empty string, TYPE_UNSPECIFIED) are skipped on the wire.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(collection_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, collection_);
    }
    if (type_ != com.google.cloud.visionai.v1.CollectionItem.Type.TYPE_UNSPECIFIED.getNumber()) {
      output.writeEnum(2, type_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(itemResource_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, itemResource_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the serialized byte size,
  // mirroring the skip-default logic in writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(collection_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, collection_);
    }
    if (type_ != com.google.cloud.visionai.v1.CollectionItem.Type.TYPE_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, type_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(itemResource_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, itemResource_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including unknown fields; the enum is
  // compared by its raw wire number.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.visionai.v1.CollectionItem)) {
      return super.equals(obj);
    }
    com.google.cloud.visionai.v1.CollectionItem other =
        (com.google.cloud.visionai.v1.CollectionItem) obj;
    if (!getCollection().equals(other.getCollection())) return false;
    if (type_ != other.type_) return false;
    if (!getItemResource().equals(other.getItemResource())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals: folds in each field number and
  // value plus the unknown field set.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + COLLECTION_FIELD_NUMBER;
    hash = (53 * hash) + getCollection().hashCode();
    hash = (37 * hash) + TYPE_FIELD_NUMBER;
    hash = (53 * hash) + type_;
    hash = (37 * hash) + ITEM_RESOURCE_FIELD_NUMBER;
    hash = (53 * hash) + getItemResource().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER (or the
  // GeneratedMessageV3 stream helpers for IO-based inputs).
  public static com.google.cloud.visionai.v1.CollectionItem parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.visionai.v1.CollectionItem parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.CollectionItem parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.visionai.v1.CollectionItem parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.CollectionItem parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.visionai.v1.CollectionItem parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.CollectionItem parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.visionai.v1.CollectionItem parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.CollectionItem parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.visionai.v1.CollectionItem parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.visionai.v1.CollectionItem parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.visionai.v1.CollectionItem parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods; builders start from the default instance or
  // from a prototype message to copy.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.visionai.v1.CollectionItem prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* A CollectionItem is an item in a collection.
* Each item is a reference to the original resource in a collection.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.CollectionItem}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.CollectionItem)
com.google.cloud.visionai.v1.CollectionItemOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.visionai.v1.WarehouseProto
.internal_static_google_cloud_visionai_v1_CollectionItem_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.visionai.v1.WarehouseProto
.internal_static_google_cloud_visionai_v1_CollectionItem_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.visionai.v1.CollectionItem.class,
com.google.cloud.visionai.v1.CollectionItem.Builder.class);
}
// Construct using com.google.cloud.visionai.v1.CollectionItem.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
collection_ = "";
type_ = 0;
itemResource_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.visionai.v1.WarehouseProto
.internal_static_google_cloud_visionai_v1_CollectionItem_descriptor;
}
@java.lang.Override
public com.google.cloud.visionai.v1.CollectionItem getDefaultInstanceForType() {
return com.google.cloud.visionai.v1.CollectionItem.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.visionai.v1.CollectionItem build() {
com.google.cloud.visionai.v1.CollectionItem result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.visionai.v1.CollectionItem buildPartial() {
com.google.cloud.visionai.v1.CollectionItem result =
new com.google.cloud.visionai.v1.CollectionItem(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.visionai.v1.CollectionItem result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.collection_ = collection_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.type_ = type_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.itemResource_ = itemResource_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.visionai.v1.CollectionItem) {
return mergeFrom((com.google.cloud.visionai.v1.CollectionItem) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.visionai.v1.CollectionItem other) {
if (other == com.google.cloud.visionai.v1.CollectionItem.getDefaultInstance()) return this;
if (!other.getCollection().isEmpty()) {
collection_ = other.collection_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.type_ != 0) {
setTypeValue(other.getTypeValue());
}
if (!other.getItemResource().isEmpty()) {
itemResource_ = other.itemResource_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
collection_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
type_ = input.readEnum();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
itemResource_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object collection_ = "";
/**
*
*
* <pre>
* Required. The collection name that this item belongs to. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/collections/{collection}`
* </pre>
*
* <code>
* string collection = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The collection.
*/
public java.lang.String getCollection() {
java.lang.Object ref = collection_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
collection_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The collection name that this item belongs to. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/collections/{collection}`
* </pre>
*
* <code>
* string collection = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for collection.
*/
public com.google.protobuf.ByteString getCollectionBytes() {
java.lang.Object ref = collection_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
collection_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The collection name that this item belongs to. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/collections/{collection}`
* </pre>
*
* <code>
* string collection = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The collection to set.
* @return This builder for chaining.
*/
public Builder setCollection(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
collection_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The collection name that this item belongs to. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/collections/{collection}`
* </pre>
*
* <code>
* string collection = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearCollection() {
collection_ = getDefaultInstance().getCollection();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The collection name that this item belongs to. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/collections/{collection}`
* </pre>
*
* <code>
* string collection = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for collection to set.
* @return This builder for chaining.
*/
public Builder setCollectionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
collection_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int type_ = 0;
/**
*
*
* <pre>
* Required. The type of item.
* </pre>
*
* <code>
* .google.cloud.visionai.v1.CollectionItem.Type type = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The enum numeric value on the wire for type.
*/
@java.lang.Override
public int getTypeValue() {
return type_;
}
/**
*
*
* <pre>
* Required. The type of item.
* </pre>
*
* <code>
* .google.cloud.visionai.v1.CollectionItem.Type type = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @param value The enum numeric value on the wire for type to set.
* @return This builder for chaining.
*/
public Builder setTypeValue(int value) {
type_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The type of item.
* </pre>
*
* <code>
* .google.cloud.visionai.v1.CollectionItem.Type type = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The type.
*/
@java.lang.Override
public com.google.cloud.visionai.v1.CollectionItem.Type getType() {
com.google.cloud.visionai.v1.CollectionItem.Type result =
com.google.cloud.visionai.v1.CollectionItem.Type.forNumber(type_);
return result == null
? com.google.cloud.visionai.v1.CollectionItem.Type.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* Required. The type of item.
* </pre>
*
* <code>
* .google.cloud.visionai.v1.CollectionItem.Type type = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @param value The type to set.
* @return This builder for chaining.
*/
public Builder setType(com.google.cloud.visionai.v1.CollectionItem.Type value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
type_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The type of item.
* </pre>
*
* <code>
* .google.cloud.visionai.v1.CollectionItem.Type type = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearType() {
bitField0_ = (bitField0_ & ~0x00000002);
type_ = 0;
onChanged();
return this;
}
private java.lang.Object itemResource_ = "";
/**
*
*
* <pre>
* Required. The name of the CollectionItem. Its format depends on the `type`
* above. For ASSET:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/assets/{asset}`
* </pre>
*
* <code>string item_resource = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The itemResource.
*/
public java.lang.String getItemResource() {
java.lang.Object ref = itemResource_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
itemResource_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the CollectionItem. Its format depends on the `type`
* above. For ASSET:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/assets/{asset}`
* </pre>
*
* <code>string item_resource = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for itemResource.
*/
public com.google.protobuf.ByteString getItemResourceBytes() {
java.lang.Object ref = itemResource_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
itemResource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the CollectionItem. Its format depends on the `type`
* above. For ASSET:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/assets/{asset}`
* </pre>
*
* <code>string item_resource = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The itemResource to set.
* @return This builder for chaining.
*/
public Builder setItemResource(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
itemResource_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the CollectionItem. Its format depends on the `type`
* above. For ASSET:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/assets/{asset}`
* </pre>
*
* <code>string item_resource = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearItemResource() {
itemResource_ = getDefaultInstance().getItemResource();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
 * Sets the required {@code item_resource} field from its raw bytes form. The
 * bytes must be valid UTF-8, since the field is a proto3 {@code string}.
 *
 * <code>string item_resource = 3 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The bytes for itemResource to set; must not be {@code null}.
 * @return This builder for chaining.
 * @throws NullPointerException if {@code value} is {@code null}
 */
public Builder setItemResourceBytes(com.google.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  checkByteStringIsUtf8(value); // proto3 strings must be valid UTF-8
  itemResource_ = value;
  bitField0_ |= 0x00000004; // mark item_resource as explicitly set
  onChanged();
  return this;
}
// Delegates unknown-field replacement to the generated base builder.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
// Delegates unknown-field merging to the generated base builder.
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.CollectionItem)
}
// @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.CollectionItem)
// Shared immutable singleton with every field at its proto default; created
// eagerly when the class is initialized.
private static final com.google.cloud.visionai.v1.CollectionItem DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.CollectionItem();
}

// Returns the shared default instance; callers must treat it as immutable.
public static com.google.cloud.visionai.v1.CollectionItem getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser shared by all parse calls. On failure the partially-built
// message is attached to the thrown InvalidProtocolBufferException so callers
// can inspect what was successfully read before the error.
private static final com.google.protobuf.Parser<CollectionItem> PARSER =
    new com.google.protobuf.AbstractParser<CollectionItem>() {
      @java.lang.Override
      public CollectionItem parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          // Convert missing-required-field failures to the protobuf exception type.
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the exception type callers expect.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<CollectionItem> parser() {
  return PARSER;
}
// Instance accessor required by the Message interface; same shared parser.
@java.lang.Override
public com.google.protobuf.Parser<CollectionItem> getParserForType() {
  return PARSER;
}
// Instance accessor required by the Message interface; same shared default.
@java.lang.Override
public com.google.cloud.visionai.v1.CollectionItem getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/pinot | 35,932 | pinot-plugins/pinot-input-format/pinot-thrift/src/test/java/org/apache/pinot/plugin/inputformat/thrift/ThriftSampleData.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
//
// Source code recreated from a .class file by IntelliJ IDEA
// (powered by Fernflower decompiler)
//
package org.apache.pinot.plugin.inputformat.thrift;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TBase;
import org.apache.thrift.TBaseHelper;
import org.apache.thrift.TException;
import org.apache.thrift.TFieldIdEnum;
import org.apache.thrift.meta_data.FieldMetaData;
import org.apache.thrift.meta_data.FieldValueMetaData;
import org.apache.thrift.meta_data.ListMetaData;
import org.apache.thrift.meta_data.MapMetaData;
import org.apache.thrift.meta_data.SetMetaData;
import org.apache.thrift.protocol.TCompactProtocol;
import org.apache.thrift.protocol.TField;
import org.apache.thrift.protocol.TList;
import org.apache.thrift.protocol.TMap;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.protocol.TProtocolUtil;
import org.apache.thrift.protocol.TSet;
import org.apache.thrift.protocol.TStruct;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.transport.TIOStreamTransport;
/**
 * Thrift-generated sample struct used as a test fixture for the Pinot Thrift
 * record reader. The source was recreated from a compiled class (Fernflower
 * decompiler), so it follows the standard layout emitted by the Thrift
 * compiler: public fields plus an isset bitfield for primitives, a
 * {@code _Fields} id enum, a static metadata map, and a standard (field-tagged)
 * plus a tuple (compact, bitset-prefixed) serialization scheme.
 */
public class ThriftSampleData implements TBase<ThriftSampleData, ThriftSampleData._Fields>, Serializable, Cloneable, Comparable<ThriftSampleData> {
  private static final TStruct STRUCT_DESC = new TStruct("ThriftSampleData");
  // Wire descriptors: (name, thrift type id, field id).
  // Type ids: 8=i32, 11=string, 10=i64, 2=bool, 15=list, 13=map, 14=set.
  private static final TField ID_FIELD_DESC = new TField("id", (byte) 8, (short) 1);
  private static final TField NAME_FIELD_DESC = new TField("name", (byte) 11, (short) 2);
  private static final TField CREATED_AT_FIELD_DESC = new TField("created_at", (byte) 10, (short) 3);
  private static final TField ACTIVE_FIELD_DESC = new TField("active", (byte) 2, (short) 4);
  private static final TField GROUPS_FIELD_DESC = new TField("groups", (byte) 15, (short) 5);
  private static final TField MAP_VALUES_FIELD_DESC = new TField("map_values", (byte) 13, (short) 6);
  private static final TField SET_VALUES_FIELD_DESC = new TField("set_values", (byte) 14, (short) 7);
  // Maps a scheme class (standard/tuple) to the factory producing its serializer.
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap();
  public int id;
  public String name;
  public long created_at;
  public boolean active;
  public List<Short> groups;
  public Map<String, Long> map_values;
  public Set<String> set_values;
  // Bit positions inside __isset_bitfield tracking which primitive fields were set
  // (reference types use null-ness instead).
  private static final int __ID_ISSET_ID = 0;
  private static final int __CREATED_AT_ISSET_ID = 1;
  private static final int __ACTIVE_ISSET_ID = 2;
  private byte __isset_bitfield;
  private static final ThriftSampleData._Fields[] optionals;
  public static final Map<ThriftSampleData._Fields, FieldMetaData> metaDataMap;

  public ThriftSampleData() {
    this.__isset_bitfield = 0;
  }

  // Convenience constructor for the non-optional container fields only.
  public ThriftSampleData(List<Short> groups, Map<String, Long> map_values, Set<String> set_values) {
    this();
    this.groups = groups;
    this.map_values = map_values;
    this.set_values = set_values;
  }

  // Copy constructor: containers are shallow-copied into new collections,
  // scalar fields and the isset bitfield are copied directly.
  public ThriftSampleData(ThriftSampleData other) {
    this.__isset_bitfield = 0;
    this.__isset_bitfield = other.__isset_bitfield;
    this.id = other.id;
    if (other.isSetName()) {
      this.name = other.name;
    }
    this.created_at = other.created_at;
    this.active = other.active;
    if (other.isSetGroups()) {
      List<Short> __this__groups = new ArrayList(other.groups);
      this.groups = __this__groups;
    }
    if (other.isSetMap_values()) {
      Map<String, Long> __this__map_values = new HashMap(other.map_values);
      this.map_values = __this__map_values;
    }
    if (other.isSetSet_values()) {
      Set<String> __this__set_values = new HashSet(other.set_values);
      this.set_values = __this__set_values;
    }
  }

  public ThriftSampleData deepCopy() {
    return new ThriftSampleData(this);
  }

  // Resets every field to its Thrift default and clears all isset bits.
  public void clear() {
    this.setIdIsSet(false);
    this.id = 0;
    this.name = null;
    this.setCreated_atIsSet(false);
    this.created_at = 0L;
    this.setActiveIsSet(false);
    this.active = false;
    this.groups = null;
    this.map_values = null;
    this.set_values = null;
  }

  // ---- id (optional i32) accessors; presence tracked in bit 0 ----

  public int getId() {
    return this.id;
  }

  public ThriftSampleData setId(int id) {
    this.id = id;
    this.setIdIsSet(true);
    return this;
  }

  public void unsetId() {
    this.__isset_bitfield = EncodingUtils.clearBit(this.__isset_bitfield, 0);
  }

  public boolean isSetId() {
    return EncodingUtils.testBit(this.__isset_bitfield, 0);
  }

  public void setIdIsSet(boolean value) {
    this.__isset_bitfield = EncodingUtils.setBit(this.__isset_bitfield, 0, value);
  }

  // ---- name (optional string) accessors; presence tracked by null-ness ----

  public String getName() {
    return this.name;
  }

  public ThriftSampleData setName(String name) {
    this.name = name;
    return this;
  }

  public void unsetName() {
    this.name = null;
  }

  public boolean isSetName() {
    return this.name != null;
  }

  public void setNameIsSet(boolean value) {
    if (!value) {
      this.name = null;
    }
  }

  // ---- created_at (optional i64) accessors; presence tracked in bit 1 ----

  public long getCreated_at() {
    return this.created_at;
  }

  public ThriftSampleData setCreated_at(long created_at) {
    this.created_at = created_at;
    this.setCreated_atIsSet(true);
    return this;
  }

  public void unsetCreated_at() {
    this.__isset_bitfield = EncodingUtils.clearBit(this.__isset_bitfield, 1);
  }

  public boolean isSetCreated_at() {
    return EncodingUtils.testBit(this.__isset_bitfield, 1);
  }

  public void setCreated_atIsSet(boolean value) {
    this.__isset_bitfield = EncodingUtils.setBit(this.__isset_bitfield, 1, value);
  }

  // ---- active (optional bool) accessors; presence tracked in bit 2 ----

  public boolean isActive() {
    return this.active;
  }

  public ThriftSampleData setActive(boolean active) {
    this.active = active;
    this.setActiveIsSet(true);
    return this;
  }

  public void unsetActive() {
    this.__isset_bitfield = EncodingUtils.clearBit(this.__isset_bitfield, 2);
  }

  public boolean isSetActive() {
    return EncodingUtils.testBit(this.__isset_bitfield, 2);
  }

  public void setActiveIsSet(boolean value) {
    this.__isset_bitfield = EncodingUtils.setBit(this.__isset_bitfield, 2, value);
  }

  // ---- groups (list<i16>) accessors ----

  public int getGroupsSize() {
    return this.groups == null ? 0 : this.groups.size();
  }

  public Iterator<Short> getGroupsIterator() {
    return this.groups == null ? null : this.groups.iterator();
  }

  public void addToGroups(short elem) {
    if (this.groups == null) {
      this.groups = new ArrayList();
    }
    this.groups.add(elem);
  }

  public List<Short> getGroups() {
    return this.groups;
  }

  public ThriftSampleData setGroups(List<Short> groups) {
    this.groups = groups;
    return this;
  }

  public void unsetGroups() {
    this.groups = null;
  }

  public boolean isSetGroups() {
    return this.groups != null;
  }

  public void setGroupsIsSet(boolean value) {
    if (!value) {
      this.groups = null;
    }
  }

  // ---- map_values (map<string,i64>) accessors ----

  public int getMap_valuesSize() {
    return this.map_values == null ? 0 : this.map_values.size();
  }

  public void putToMap_values(String key, long val) {
    if (this.map_values == null) {
      this.map_values = new HashMap();
    }
    this.map_values.put(key, val);
  }

  public Map<String, Long> getMap_values() {
    return this.map_values;
  }

  public ThriftSampleData setMap_values(Map<String, Long> map_values) {
    this.map_values = map_values;
    return this;
  }

  public void unsetMap_values() {
    this.map_values = null;
  }

  public boolean isSetMap_values() {
    return this.map_values != null;
  }

  public void setMap_valuesIsSet(boolean value) {
    if (!value) {
      this.map_values = null;
    }
  }

  // ---- set_values (set<string>) accessors ----

  public int getSet_valuesSize() {
    return this.set_values == null ? 0 : this.set_values.size();
  }

  public Iterator<String> getSet_valuesIterator() {
    return this.set_values == null ? null : this.set_values.iterator();
  }

  public void addToSet_values(String elem) {
    if (this.set_values == null) {
      this.set_values = new HashSet();
    }
    this.set_values.add(elem);
  }

  public Set<String> getSet_values() {
    return this.set_values;
  }

  public ThriftSampleData setSet_values(Set<String> set_values) {
    this.set_values = set_values;
    return this;
  }

  public void unsetSet_values() {
    this.set_values = null;
  }

  public boolean isSetSet_values() {
    return this.set_values != null;
  }

  public void setSet_valuesIsSet(boolean value) {
    if (!value) {
      this.set_values = null;
    }
  }

  // Generic setter used by the TBase reflection API; null unsets the field.
  public void setFieldValue(ThriftSampleData._Fields field, Object value) {
    switch (field) {
      case ID:
        if (value == null) {
          this.unsetId();
        } else {
          this.setId(((Integer) value).intValue());
        }
        break;
      case NAME:
        if (value == null) {
          this.unsetName();
        } else {
          this.setName((String) value);
        }
        break;
      case CREATED_AT:
        if (value == null) {
          this.unsetCreated_at();
        } else {
          this.setCreated_at(((Long) value).longValue());
        }
        break;
      case ACTIVE:
        if (value == null) {
          this.unsetActive();
        } else {
          this.setActive(((Boolean) value).booleanValue());
        }
        break;
      case GROUPS:
        if (value == null) {
          this.unsetGroups();
        } else {
          this.setGroups((List) value);
        }
        break;
      case MAP_VALUES:
        if (value == null) {
          this.unsetMap_values();
        } else {
          this.setMap_values((Map) value);
        }
        break;
      case SET_VALUES:
        if (value == null) {
          this.unsetSet_values();
        } else {
          this.setSet_values((Set) value);
        }
    }
  }

  // Generic getter used by the TBase reflection API; primitives are boxed.
  public Object getFieldValue(ThriftSampleData._Fields field) {
    switch (field) {
      case ID:
        return this.getId();
      case NAME:
        return this.getName();
      case CREATED_AT:
        return this.getCreated_at();
      case ACTIVE:
        return this.isActive();
      case GROUPS:
        return this.getGroups();
      case MAP_VALUES:
        return this.getMap_values();
      case SET_VALUES:
        return this.getSet_values();
      default:
        throw new IllegalStateException();
    }
  }

  // Generic presence check used by the TBase reflection API.
  public boolean isSet(ThriftSampleData._Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    } else {
      switch (field) {
        case ID:
          return this.isSetId();
        case NAME:
          return this.isSetName();
        case CREATED_AT:
          return this.isSetCreated_at();
        case ACTIVE:
          return this.isSetActive();
        case GROUPS:
          return this.isSetGroups();
        case MAP_VALUES:
          return this.isSetMap_values();
        case SET_VALUES:
          return this.isSetSet_values();
        default:
          throw new IllegalStateException();
      }
    }
  }

  public boolean equals(Object that) {
    if (that == null) {
      return false;
    } else {
      return that instanceof ThriftSampleData ? this.equals((ThriftSampleData) that) : false;
    }
  }

  // Field-by-field equality; for each field, both sides must agree on presence
  // and, when present, on value.
  public boolean equals(ThriftSampleData that) {
    if (that == null) {
      return false;
    } else {
      boolean this_present_id = this.isSetId();
      boolean that_present_id = that.isSetId();
      if (this_present_id || that_present_id) {
        if (!this_present_id || !that_present_id) {
          return false;
        }
        if (this.id != that.id) {
          return false;
        }
      }
      boolean this_present_name = this.isSetName();
      boolean that_present_name = that.isSetName();
      if (this_present_name || that_present_name) {
        if (!this_present_name || !that_present_name) {
          return false;
        }
        if (!this.name.equals(that.name)) {
          return false;
        }
      }
      boolean this_present_created_at = this.isSetCreated_at();
      boolean that_present_created_at = that.isSetCreated_at();
      if (this_present_created_at || that_present_created_at) {
        if (!this_present_created_at || !that_present_created_at) {
          return false;
        }
        if (this.created_at != that.created_at) {
          return false;
        }
      }
      boolean this_present_active = this.isSetActive();
      boolean that_present_active = that.isSetActive();
      if (this_present_active || that_present_active) {
        if (!this_present_active || !that_present_active) {
          return false;
        }
        if (this.active != that.active) {
          return false;
        }
      }
      boolean this_present_groups = this.isSetGroups();
      boolean that_present_groups = that.isSetGroups();
      if (this_present_groups || that_present_groups) {
        if (!this_present_groups || !that_present_groups) {
          return false;
        }
        if (!this.groups.equals(that.groups)) {
          return false;
        }
      }
      boolean this_present_map_values = this.isSetMap_values();
      boolean that_present_map_values = that.isSetMap_values();
      if (this_present_map_values || that_present_map_values) {
        if (!this_present_map_values || !that_present_map_values) {
          return false;
        }
        if (!this.map_values.equals(that.map_values)) {
          return false;
        }
      }
      boolean this_present_set_values = this.isSetSet_values();
      boolean that_present_set_values = that.isSetSet_values();
      if (this_present_set_values || that_present_set_values) {
        if (!this_present_set_values || !that_present_set_values) {
          return false;
        }
        if (!this.set_values.equals(that.set_values)) {
          return false;
        }
      }
      return true;
    }
  }

  // Hashes presence flags plus present values, mirroring equals() above.
  public int hashCode() {
    List<Object> list = new ArrayList();
    boolean present_id = this.isSetId();
    list.add(present_id);
    if (present_id) {
      list.add(this.id);
    }
    boolean present_name = this.isSetName();
    list.add(present_name);
    if (present_name) {
      list.add(this.name);
    }
    boolean present_created_at = this.isSetCreated_at();
    list.add(present_created_at);
    if (present_created_at) {
      list.add(this.created_at);
    }
    boolean present_active = this.isSetActive();
    list.add(present_active);
    if (present_active) {
      list.add(this.active);
    }
    boolean present_groups = this.isSetGroups();
    list.add(present_groups);
    if (present_groups) {
      list.add(this.groups);
    }
    boolean present_map_values = this.isSetMap_values();
    list.add(present_map_values);
    if (present_map_values) {
      list.add(this.map_values);
    }
    boolean present_set_values = this.isSetSet_values();
    list.add(present_set_values);
    if (present_set_values) {
      list.add(this.set_values);
    }
    return list.hashCode();
  }

  // Orders by field id: first on presence (unset < set), then on value.
  public int compareTo(ThriftSampleData other) {
    if (!this.getClass().equals(other.getClass())) {
      return this.getClass().getName().compareTo(other.getClass().getName());
    } else {
      int lastComparison = 0;
      lastComparison = Boolean.valueOf(this.isSetId()).compareTo(other.isSetId());
      if (lastComparison != 0) {
        return lastComparison;
      } else {
        if (this.isSetId()) {
          lastComparison = TBaseHelper.compareTo(this.id, other.id);
          if (lastComparison != 0) {
            return lastComparison;
          }
        }
        lastComparison = Boolean.valueOf(this.isSetName()).compareTo(other.isSetName());
        if (lastComparison != 0) {
          return lastComparison;
        } else {
          if (this.isSetName()) {
            lastComparison = TBaseHelper.compareTo(this.name, other.name);
            if (lastComparison != 0) {
              return lastComparison;
            }
          }
          lastComparison = Boolean.valueOf(this.isSetCreated_at()).compareTo(other.isSetCreated_at());
          if (lastComparison != 0) {
            return lastComparison;
          } else {
            if (this.isSetCreated_at()) {
              lastComparison = TBaseHelper.compareTo(this.created_at, other.created_at);
              if (lastComparison != 0) {
                return lastComparison;
              }
            }
            lastComparison = Boolean.valueOf(this.isSetActive()).compareTo(other.isSetActive());
            if (lastComparison != 0) {
              return lastComparison;
            } else {
              if (this.isSetActive()) {
                lastComparison = TBaseHelper.compareTo(this.active, other.active);
                if (lastComparison != 0) {
                  return lastComparison;
                }
              }
              lastComparison = Boolean.valueOf(this.isSetGroups()).compareTo(other.isSetGroups());
              if (lastComparison != 0) {
                return lastComparison;
              } else {
                if (this.isSetGroups()) {
                  lastComparison = TBaseHelper.compareTo(this.groups, other.groups);
                  if (lastComparison != 0) {
                    return lastComparison;
                  }
                }
                lastComparison = Boolean.valueOf(this.isSetMap_values()).compareTo(other.isSetMap_values());
                if (lastComparison != 0) {
                  return lastComparison;
                } else {
                  if (this.isSetMap_values()) {
                    lastComparison = TBaseHelper.compareTo(this.map_values, other.map_values);
                    if (lastComparison != 0) {
                      return lastComparison;
                    }
                  }
                  lastComparison = Boolean.valueOf(this.isSetSet_values()).compareTo(other.isSetSet_values());
                  if (lastComparison != 0) {
                    return lastComparison;
                  } else {
                    if (this.isSetSet_values()) {
                      lastComparison = TBaseHelper.compareTo(this.set_values, other.set_values);
                      if (lastComparison != 0) {
                        return lastComparison;
                      }
                    }
                    return 0;
                  }
                }
              }
            }
          }
        }
      }
    }
  }

  public ThriftSampleData._Fields fieldForId(int fieldId) {
    return ThriftSampleData._Fields.findByThriftId(fieldId);
  }

  // Deserializes this struct using the scheme matching the protocol
  // (standard or tuple), as registered in the schemes map.
  public void read(TProtocol iprot)
      throws TException {
    ((SchemeFactory) schemes.get(iprot.getScheme())).getScheme().read(iprot, this);
  }

  // Serializes this struct using the scheme matching the protocol.
  public void write(TProtocol oprot)
      throws TException {
    ((SchemeFactory) schemes.get(oprot.getScheme())).getScheme().write(oprot, this);
  }

  // Optional scalar fields appear only when set; container fields always
  // appear (printed as "null" when absent).
  public String toString() {
    StringBuilder sb = new StringBuilder("ThriftSampleData(");
    boolean first = true;
    if (this.isSetId()) {
      sb.append("id:");
      sb.append(this.id);
      first = false;
    }
    if (this.isSetName()) {
      if (!first) {
        sb.append(", ");
      }
      sb.append("name:");
      if (this.name == null) {
        sb.append("null");
      } else {
        sb.append(this.name);
      }
      first = false;
    }
    if (this.isSetCreated_at()) {
      if (!first) {
        sb.append(", ");
      }
      sb.append("created_at:");
      sb.append(this.created_at);
      first = false;
    }
    if (this.isSetActive()) {
      if (!first) {
        sb.append(", ");
      }
      sb.append("active:");
      sb.append(this.active);
      first = false;
    }
    if (!first) {
      sb.append(", ");
    }
    sb.append("groups:");
    if (this.groups == null) {
      sb.append("null");
    } else {
      sb.append(this.groups);
    }
    first = false;
    if (!first) {
      sb.append(", ");
    }
    sb.append("map_values:");
    if (this.map_values == null) {
      sb.append("null");
    } else {
      sb.append(this.map_values);
    }
    first = false;
    if (!first) {
      sb.append(", ");
    }
    sb.append("set_values:");
    if (this.set_values == null) {
      sb.append("null");
    } else {
      sb.append(this.set_values);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }

  // No required fields and no struct-valued fields, so nothing to validate.
  public void validate()
      throws TException {
  }

  // Java serialization hook: delegates to Thrift compact-protocol encoding.
  private void writeObject(ObjectOutputStream out)
      throws IOException {
    try {
      this.write(new TCompactProtocol(new TIOStreamTransport(out)));
    } catch (TException var3) {
      throw new IOException(var3);
    }
  }

  // Java deserialization hook: resets the isset bitfield (it is not part of
  // the wire format) and reads back via the compact protocol.
  private void readObject(ObjectInputStream in)
      throws IOException, ClassNotFoundException {
    try {
      this.__isset_bitfield = 0;
      this.read(new TCompactProtocol(new TIOStreamTransport(in)));
    } catch (TException var3) {
      throw new IOException(var3);
    }
  }

  // Registers the two serialization schemes and builds the static field
  // metadata exposed through metaDataMap.
  static {
    schemes.put(StandardScheme.class, new ThriftSampleData.ThriftSampleData1StandardSchemeFactory());
    schemes.put(TupleScheme.class, new ThriftSampleData.ThriftSampleData1TupleSchemeFactory());
    optionals =
        new ThriftSampleData._Fields[]{ThriftSampleData._Fields.ID, ThriftSampleData._Fields.NAME, ThriftSampleData._Fields.CREATED_AT, ThriftSampleData._Fields.ACTIVE};
    Map<ThriftSampleData._Fields, FieldMetaData> tmpMap = new EnumMap(ThriftSampleData._Fields.class);
    tmpMap.put(ThriftSampleData._Fields.ID, new FieldMetaData("id", (byte) 2, new FieldValueMetaData((byte) 8)));
    tmpMap.put(ThriftSampleData._Fields.NAME, new FieldMetaData("name", (byte) 2, new FieldValueMetaData((byte) 11)));
    tmpMap.put(ThriftSampleData._Fields.CREATED_AT,
        new FieldMetaData("created_at", (byte) 2, new FieldValueMetaData((byte) 10)));
    tmpMap
        .put(ThriftSampleData._Fields.ACTIVE, new FieldMetaData("active", (byte) 2, new FieldValueMetaData((byte) 2)));
    tmpMap.put(ThriftSampleData._Fields.GROUPS,
        new FieldMetaData("groups", (byte) 3, new ListMetaData((byte) 15, new FieldValueMetaData((byte) 6))));
    tmpMap.put(ThriftSampleData._Fields.MAP_VALUES, new FieldMetaData("map_values", (byte) 3,
        new MapMetaData((byte) 13, new FieldValueMetaData((byte) 11), new FieldValueMetaData((byte) 10))));
    tmpMap.put(ThriftSampleData._Fields.SET_VALUES,
        new FieldMetaData("set_values", (byte) 3, new SetMetaData((byte) 14, new FieldValueMetaData((byte) 11))));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    FieldMetaData.addStructMetaDataMap(ThriftSampleData.class, metaDataMap);
  }

  // Tuple scheme: writes a presence BitSet followed by only the set fields,
  // in field-id order. Reader must mirror the writer exactly.
  private static class ThriftSampleData1TupleScheme extends TupleScheme<ThriftSampleData> {
    private ThriftSampleData1TupleScheme() {
    }

    public void write(TProtocol prot, ThriftSampleData struct)
        throws TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetId()) {
        optionals.set(0);
      }
      if (struct.isSetName()) {
        optionals.set(1);
      }
      if (struct.isSetCreated_at()) {
        optionals.set(2);
      }
      if (struct.isSetActive()) {
        optionals.set(3);
      }
      if (struct.isSetGroups()) {
        optionals.set(4);
      }
      if (struct.isSetMap_values()) {
        optionals.set(5);
      }
      if (struct.isSetSet_values()) {
        optionals.set(6);
      }
      oprot.writeBitSet(optionals, 7);
      if (struct.isSetId()) {
        oprot.writeI32(struct.id);
      }
      if (struct.isSetName()) {
        oprot.writeString(struct.name);
      }
      if (struct.isSetCreated_at()) {
        oprot.writeI64(struct.created_at);
      }
      if (struct.isSetActive()) {
        oprot.writeBool(struct.active);
      }
      Iterator var5;
      if (struct.isSetGroups()) {
        // Containers are written as a size prefix followed by raw elements.
        oprot.writeI32(struct.groups.size());
        var5 = struct.groups.iterator();
        while (var5.hasNext()) {
          short _iter13 = ((Short) var5.next()).shortValue();
          oprot.writeI16(_iter13);
        }
      }
      if (struct.isSetMap_values()) {
        oprot.writeI32(struct.map_values.size());
        var5 = struct.map_values.entrySet().iterator();
        while (var5.hasNext()) {
          Entry<String, Long> _iter14 = (Entry) var5.next();
          oprot.writeString((String) _iter14.getKey());
          oprot.writeI64(((Long) _iter14.getValue()).longValue());
        }
      }
      if (struct.isSetSet_values()) {
        oprot.writeI32(struct.set_values.size());
        var5 = struct.set_values.iterator();
        while (var5.hasNext()) {
          String _iter15 = (String) var5.next();
          oprot.writeString(_iter15);
        }
      }
    }

    public void read(TProtocol prot, ThriftSampleData struct)
        throws TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(7);
      if (incoming.get(0)) {
        struct.id = iprot.readI32();
        struct.setIdIsSet(true);
      }
      if (incoming.get(1)) {
        struct.name = iprot.readString();
        struct.setNameIsSet(true);
      }
      if (incoming.get(2)) {
        struct.created_at = iprot.readI64();
        struct.setCreated_atIsSet(true);
      }
      if (incoming.get(3)) {
        struct.active = iprot.readBool();
        struct.setActiveIsSet(true);
      }
      int _i25;
      if (incoming.get(4)) {
        TList _list16 = new TList((byte) 6, iprot.readI32());
        struct.groups = new ArrayList(_list16.size);
        for (_i25 = 0; _i25 < _list16.size; ++_i25) {
          short _elem17 = iprot.readI16();
          struct.groups.add(_elem17);
        }
        struct.setGroupsIsSet(true);
      }
      String _elem24;
      if (incoming.get(5)) {
        TMap _map19 = new TMap((byte) 11, (byte) 10, iprot.readI32());
        struct.map_values = new HashMap(2 * _map19.size);
        for (int _i22 = 0; _i22 < _map19.size; ++_i22) {
          _elem24 = iprot.readString();
          long _val21 = iprot.readI64();
          struct.map_values.put(_elem24, _val21);
        }
        struct.setMap_valuesIsSet(true);
      }
      if (incoming.get(6)) {
        TSet _set23 = new TSet((byte) 11, iprot.readI32());
        struct.set_values = new HashSet(2 * _set23.size);
        for (_i25 = 0; _i25 < _set23.size; ++_i25) {
          _elem24 = iprot.readString();
          struct.set_values.add(_elem24);
        }
        struct.setSet_valuesIsSet(true);
      }
    }
  }

  private static class ThriftSampleData1TupleSchemeFactory implements SchemeFactory {
    private ThriftSampleData1TupleSchemeFactory() {
    }

    public ThriftSampleData.ThriftSampleData1TupleScheme getScheme() {
      return new ThriftSampleData.ThriftSampleData1TupleScheme();
    }
  }

  // Standard scheme: each field is tagged with (type, id) on the wire, so
  // fields may arrive in any order and unknown/mistyped fields are skipped.
  private static class ThriftSampleData1StandardScheme extends StandardScheme<ThriftSampleData> {
    private ThriftSampleData1StandardScheme() {
    }

    public void read(TProtocol iprot, ThriftSampleData struct)
        throws TException {
      iprot.readStructBegin();
      while (true) {
        TField schemeField = iprot.readFieldBegin();
        if (schemeField.type == 0) {
          // Type 0 is the stop field: end of struct.
          iprot.readStructEnd();
          struct.validate();
          return;
        }
        String _elem8;
        int _i9;
        switch (schemeField.id) {
          case 1:
            if (schemeField.type == 8) {
              struct.id = iprot.readI32();
              struct.setIdIsSet(true);
            } else {
              TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2:
            if (schemeField.type == 11) {
              struct.name = iprot.readString();
              struct.setNameIsSet(true);
            } else {
              TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3:
            if (schemeField.type == 10) {
              struct.created_at = iprot.readI64();
              struct.setCreated_atIsSet(true);
            } else {
              TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 4:
            if (schemeField.type == 2) {
              struct.active = iprot.readBool();
              struct.setActiveIsSet(true);
            } else {
              TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 5:
            if (schemeField.type != 15) {
              TProtocolUtil.skip(iprot, schemeField.type);
              break;
            }
            TList _list0 = iprot.readListBegin();
            struct.groups = new ArrayList(_list0.size);
            for (_i9 = 0; _i9 < _list0.size; ++_i9) {
              short _elem1 = iprot.readI16();
              struct.groups.add(_elem1);
            }
            iprot.readListEnd();
            struct.setGroupsIsSet(true);
            break;
          case 6:
            if (schemeField.type != 13) {
              TProtocolUtil.skip(iprot, schemeField.type);
              break;
            }
            TMap _map3 = iprot.readMapBegin();
            struct.map_values = new HashMap(2 * _map3.size);
            for (int _i6 = 0; _i6 < _map3.size; ++_i6) {
              _elem8 = iprot.readString();
              long _val5 = iprot.readI64();
              struct.map_values.put(_elem8, _val5);
            }
            iprot.readMapEnd();
            struct.setMap_valuesIsSet(true);
            break;
          case 7:
            if (schemeField.type != 14) {
              TProtocolUtil.skip(iprot, schemeField.type);
              break;
            }
            TSet _set7 = iprot.readSetBegin();
            struct.set_values = new HashSet(2 * _set7.size);
            for (_i9 = 0; _i9 < _set7.size; ++_i9) {
              _elem8 = iprot.readString();
              struct.set_values.add(_elem8);
            }
            iprot.readSetEnd();
            struct.setSet_valuesIsSet(true);
            break;
          default:
            TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
    }

    public void write(TProtocol oprot, ThriftSampleData struct)
        throws TException {
      struct.validate();
      oprot.writeStructBegin(ThriftSampleData.STRUCT_DESC);
      if (struct.isSetId()) {
        oprot.writeFieldBegin(ThriftSampleData.ID_FIELD_DESC);
        oprot.writeI32(struct.id);
        oprot.writeFieldEnd();
      }
      if (struct.name != null && struct.isSetName()) {
        oprot.writeFieldBegin(ThriftSampleData.NAME_FIELD_DESC);
        oprot.writeString(struct.name);
        oprot.writeFieldEnd();
      }
      if (struct.isSetCreated_at()) {
        oprot.writeFieldBegin(ThriftSampleData.CREATED_AT_FIELD_DESC);
        oprot.writeI64(struct.created_at);
        oprot.writeFieldEnd();
      }
      if (struct.isSetActive()) {
        oprot.writeFieldBegin(ThriftSampleData.ACTIVE_FIELD_DESC);
        oprot.writeBool(struct.active);
        oprot.writeFieldEnd();
      }
      Iterator var3;
      if (struct.groups != null) {
        oprot.writeFieldBegin(ThriftSampleData.GROUPS_FIELD_DESC);
        oprot.writeListBegin(new TList((byte) 6, struct.groups.size()));
        var3 = struct.groups.iterator();
        while (var3.hasNext()) {
          short _iter10 = ((Short) var3.next()).shortValue();
          oprot.writeI16(_iter10);
        }
        oprot.writeListEnd();
        oprot.writeFieldEnd();
      }
      if (struct.map_values != null) {
        oprot.writeFieldBegin(ThriftSampleData.MAP_VALUES_FIELD_DESC);
        oprot.writeMapBegin(new TMap((byte) 11, (byte) 10, struct.map_values.size()));
        var3 = struct.map_values.entrySet().iterator();
        while (var3.hasNext()) {
          Entry<String, Long> _iter11 = (Entry) var3.next();
          oprot.writeString((String) _iter11.getKey());
          oprot.writeI64(((Long) _iter11.getValue()).longValue());
        }
        oprot.writeMapEnd();
        oprot.writeFieldEnd();
      }
      if (struct.set_values != null) {
        oprot.writeFieldBegin(ThriftSampleData.SET_VALUES_FIELD_DESC);
        oprot.writeSetBegin(new TSet((byte) 11, struct.set_values.size()));
        var3 = struct.set_values.iterator();
        while (var3.hasNext()) {
          String _iter12 = (String) var3.next();
          oprot.writeString(_iter12);
        }
        oprot.writeSetEnd();
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }

  private static class ThriftSampleData1StandardSchemeFactory implements SchemeFactory {
    private ThriftSampleData1StandardSchemeFactory() {
    }

    public ThriftSampleData.ThriftSampleData1StandardScheme getScheme() {
      return new ThriftSampleData.ThriftSampleData1StandardScheme();
    }
  }

  // Enumeration of this struct's fields, keyed by Thrift field id and name.
  public static enum _Fields implements TFieldIdEnum {
    ID((short) 1, "id"),
    NAME((short) 2, "name"),
    CREATED_AT((short) 3, "created_at"),
    ACTIVE((short) 4, "active"),
    GROUPS((short) 5, "groups"),
    MAP_VALUES((short) 6, "map_values"),
    SET_VALUES((short) 7, "set_values");

    private static final Map<String, ThriftSampleData._Fields> byName = new HashMap();
    private final short _thriftId;
    private final String _fieldName;

    // Returns the field for a wire id, or null when the id is unknown.
    public static ThriftSampleData._Fields findByThriftId(int fieldId) {
      switch (fieldId) {
        case 1:
          return ID;
        case 2:
          return NAME;
        case 3:
          return CREATED_AT;
        case 4:
          return ACTIVE;
        case 5:
          return GROUPS;
        case 6:
          return MAP_VALUES;
        case 7:
          return SET_VALUES;
        default:
          return null;
      }
    }

    public static ThriftSampleData._Fields findByThriftIdOrThrow(int fieldId) {
      ThriftSampleData._Fields fields = findByThriftId(fieldId);
      if (fields == null) {
        throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      } else {
        return fields;
      }
    }

    public static ThriftSampleData._Fields findByName(String name) {
      return (ThriftSampleData._Fields) byName.get(name);
    }

    private _Fields(short thriftId, String fieldName) {
      this._thriftId = thriftId;
      this._fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return this._thriftId;
    }

    public String getFieldName() {
      return this._fieldName;
    }

    static {
      Iterator var0 = EnumSet.allOf(ThriftSampleData._Fields.class).iterator();
      while (var0.hasNext()) {
        ThriftSampleData._Fields field = (ThriftSampleData._Fields) var0.next();
        byName.put(field.getFieldName(), field);
      }
    }
  }
}
|
googleapis/google-cloud-java | 36,061 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/GetIamPolicyNetworkAttachmentRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for NetworkAttachments.GetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest}
*/
public final class GetIamPolicyNetworkAttachmentRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest)
GetIamPolicyNetworkAttachmentRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetIamPolicyNetworkAttachmentRequest.newBuilder() to construct.
private GetIamPolicyNetworkAttachmentRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used for the default instance: initializes every string
// field to the proto3 default (empty string) rather than null.
private GetIamPolicyNetworkAttachmentRequest() {
  project_ = "";
  region_ = "";
  resource_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new GetIamPolicyNetworkAttachmentRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyNetworkAttachmentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyNetworkAttachmentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest.class,
com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest.Builder.class);
}
private int bitField0_;
public static final int OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER = 499220029;
private int optionsRequestedPolicyVersion_ = 0;
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return Whether the optionsRequestedPolicyVersion field is set.
*/
@java.lang.Override
public boolean hasOptionsRequestedPolicyVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return The optionsRequestedPolicyVersion.
*/
@java.lang.Override
public int getOptionsRequestedPolicyVersion() {
return optionsRequestedPolicyVersion_;
}
public static final int PROJECT_FIELD_NUMBER = 227560217;
@SuppressWarnings("serial")
private volatile java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
@java.lang.Override
public java.lang.String getProject() {
  java.lang.Object ref = project_;
  if (ref instanceof java.lang.String) {
    // Already decoded (or set directly as a String): return the cached value.
    return (java.lang.String) ref;
  } else {
    // First access after parsing: the field still holds the wire ByteString.
    // Decode it once and cache the String back into the volatile field.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    project_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REGION_FIELD_NUMBER = 138946292;
@SuppressWarnings("serial")
private volatile java.lang.Object region_ = "";
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The region.
*/
@java.lang.Override
public java.lang.String getRegion() {
java.lang.Object ref = region_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
region_ = s;
return s;
}
}
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for region.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRegionBytes() {
java.lang.Object ref = region_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
region_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RESOURCE_FIELD_NUMBER = 195806222;
@SuppressWarnings("serial")
private volatile java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
@java.lang.Override
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
@java.lang.Override
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Tri-state cache for isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

/** This message has no required fields, so it is always initialized. */
@java.lang.Override
public final boolean isInitialized() {
  byte cached = memoizedIsInitialized;
  if (cached != -1) {
    return cached == 1;
  }
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Fields are written in ascending field-number order: region (138946292),
  // resource (195806222), project (227560217), then the optional
  // options_requested_policy_version (499220029). Empty strings are omitted;
  // the int32 is written only when its presence bit (0x1) is set.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 138946292, region_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeInt32(499220029, optionsRequestedPolicyVersion_);
  }
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // memoizedSize caches the result; -1 (the sentinel) means "not yet computed".
  // Mirrors writeTo(): only non-empty strings and a present int32 contribute.
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(138946292, region_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size +=
        com.google.protobuf.CodedOutputStream.computeInt32Size(
            499220029, optionsRequestedPolicyVersion_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Two requests are equal when the optional policy-version field has the same
 * presence and value, all three string fields match, and the unknown field
 * sets are identical.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest that =
      (com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest) obj;
  if (hasOptionsRequestedPolicyVersion() != that.hasOptionsRequestedPolicyVersion()) {
    return false;
  }
  if (hasOptionsRequestedPolicyVersion()
      && getOptionsRequestedPolicyVersion() != that.getOptionsRequestedPolicyVersion()) {
    return false;
  }
  return getProject().equals(that.getProject())
      && getRegion().equals(that.getRegion())
      && getResource().equals(that.getResource())
      && getUnknownFields().equals(that.getUnknownFields());
}
@java.lang.Override
public int hashCode() {
  // Memoized; 0 is the "not yet computed" sentinel. Each field's hash is mixed
  // in together with its field number so equal messages (per equals()) always
  // produce equal hashes. The exact 19/37/53/29 multiplier sequence must not
  // change: it matches every other generated protobuf message.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasOptionsRequestedPolicyVersion()) {
    hash = (37 * hash) + OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER;
    hash = (53 * hash) + getOptionsRequestedPolicyVersion();
  }
  hash = (37 * hash) + PROJECT_FIELD_NUMBER;
  hash = (53 * hash) + getProject().hashCode();
  hash = (37 * hash) + REGION_FIELD_NUMBER;
  hash = (53 * hash) + getRegion().hashCode();
  hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
  hash = (53 * hash) + getResource().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
/** Returns a builder seeded with this message's fields (empty for the default instance). */
@java.lang.Override
public Builder toBuilder() {
  if (this == DEFAULT_INSTANCE) {
    return new Builder();
  }
  return new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A request message for NetworkAttachments.GetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest)
com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyNetworkAttachmentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyNetworkAttachmentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest.class,
com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest.Builder.class);
}
// Construct using com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
optionsRequestedPolicyVersion_ = 0;
project_ = "";
region_ = "";
resource_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyNetworkAttachmentRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest
getDefaultInstanceForType() {
return com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest build() {
com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest buildPartial() {
com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest result =
new com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies builder-local values into the freshly built message. The builder's
// bitField0_ tracks which fields were explicitly set (0x1 = policy version,
// 0x2 = project, 0x4 = region, 0x8 = resource); only the optional int32 keeps
// a presence bit (0x1) in the built message, since proto3 strings have no
// explicit presence.
private void buildPartial0(
    com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest result) {
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.optionsRequestedPolicyVersion_ = optionsRequestedPolicyVersion_;
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.project_ = project_;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.region_ = region_;
  }
  if (((from_bitField0_ & 0x00000008) != 0)) {
    result.resource_ = resource_;
  }
  result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest) {
return mergeFrom((com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest other) {
if (other
== com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest.getDefaultInstance())
return this;
if (other.hasOptionsRequestedPolicyVersion()) {
setOptionsRequestedPolicyVersion(other.getOptionsRequestedPolicyVersion());
}
if (!other.getProject().isEmpty()) {
project_ = other.project_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getRegion().isEmpty()) {
region_ = other.region_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getResource().isEmpty()) {
resource_ = other.resource_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses fields from the wire and merges them into this builder. Each case
// constant is a full protobuf tag: (field_number << 3) | wire_type.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 marks end of input.
          done = true;
          break;
        case 1111570338: // region: 138946292 << 3 | 2 (length-delimited)
          {
            region_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000004;
            break;
          } // case 1111570338
        case 1566449778: // resource: 195806222 << 3 | 2 (length-delimited)
          {
            resource_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000008;
            break;
          } // case 1566449778
        case 1820481738: // project: 227560217 << 3 | 2 (length-delimited)
          {
            project_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 1820481738
        case -301207064: // options_requested_policy_version: 499220029 << 3 | 0
                         // (varint); the shift overflows int into a negative value
          {
            optionsRequestedPolicyVersion_ = input.readInt32();
            bitField0_ |= 0x00000001;
            break;
          } // case -301207064
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private int optionsRequestedPolicyVersion_;
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return Whether the optionsRequestedPolicyVersion field is set.
*/
@java.lang.Override
public boolean hasOptionsRequestedPolicyVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return The optionsRequestedPolicyVersion.
*/
@java.lang.Override
public int getOptionsRequestedPolicyVersion() {
return optionsRequestedPolicyVersion_;
}
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @param value The optionsRequestedPolicyVersion to set.
* @return This builder for chaining.
*/
public Builder setOptionsRequestedPolicyVersion(int value) {
optionsRequestedPolicyVersion_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return This builder for chaining.
*/
public Builder clearOptionsRequestedPolicyVersion() {
bitField0_ = (bitField0_ & ~0x00000001);
optionsRequestedPolicyVersion_ = 0;
onChanged();
return this;
}
private java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
/**
 * Returns the project field as a UTF-8 {@code ByteString}, encoding and
 * caching it on first access if the field currently holds a {@code String}.
 */
public com.google.protobuf.ByteString getProjectBytes() {
  java.lang.Object ref = project_;
  // Fully-qualified java.lang.String in the instanceof check for consistency
  // with the message class's accessor; behavior is identical.
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    project_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The project to set.
* @return This builder for chaining.
*/
public Builder setProject(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearProject() {
project_ = getDefaultInstance().getProject();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for project to set.
* @return This builder for chaining.
*/
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object region_ = "";
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The region.
*/
public java.lang.String getRegion() {
java.lang.Object ref = region_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
region_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for region.
*/
public com.google.protobuf.ByteString getRegionBytes() {
java.lang.Object ref = region_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
region_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The region to set.
* @return This builder for chaining.
*/
public Builder setRegion(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
region_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearRegion() {
region_ = getDefaultInstance().getRegion();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for region to set.
* @return This builder for chaining.
*/
public Builder setRegionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
region_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The resource to set.
* @return This builder for chaining.
*/
public Builder setResource(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearResource() {
resource_ = getDefaultInstance().getResource();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for resource to set.
* @return This builder for chaining.
*/
public Builder setResourceBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
resource_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest)
private static final com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest();
}
public static com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<GetIamPolicyNetworkAttachmentRequest> PARSER =
new com.google.protobuf.AbstractParser<GetIamPolicyNetworkAttachmentRequest>() {
@java.lang.Override
public GetIamPolicyNetworkAttachmentRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<GetIamPolicyNetworkAttachmentRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GetIamPolicyNetworkAttachmentRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyNetworkAttachmentRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,061 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/GetIamPolicyServiceAttachmentRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for ServiceAttachments.GetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest}
*/
public final class GetIamPolicyServiceAttachmentRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest)
GetIamPolicyServiceAttachmentRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetIamPolicyServiceAttachmentRequest.newBuilder() to construct.
private GetIamPolicyServiceAttachmentRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetIamPolicyServiceAttachmentRequest() {
project_ = "";
region_ = "";
resource_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new GetIamPolicyServiceAttachmentRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyServiceAttachmentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyServiceAttachmentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest.class,
com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest.Builder.class);
}
private int bitField0_;
public static final int OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER = 499220029;
private int optionsRequestedPolicyVersion_ = 0;
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return Whether the optionsRequestedPolicyVersion field is set.
*/
@java.lang.Override
public boolean hasOptionsRequestedPolicyVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return The optionsRequestedPolicyVersion.
*/
@java.lang.Override
public int getOptionsRequestedPolicyVersion() {
return optionsRequestedPolicyVersion_;
}
public static final int PROJECT_FIELD_NUMBER = 227560217;
@SuppressWarnings("serial")
private volatile java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
@java.lang.Override
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REGION_FIELD_NUMBER = 138946292;
@SuppressWarnings("serial")
private volatile java.lang.Object region_ = "";
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The region.
*/
@java.lang.Override
public java.lang.String getRegion() {
java.lang.Object ref = region_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
region_ = s;
return s;
}
}
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for region.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRegionBytes() {
java.lang.Object ref = region_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
region_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RESOURCE_FIELD_NUMBER = 195806222;
@SuppressWarnings("serial")
private volatile java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
@java.lang.Override
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
@java.lang.Override
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 138946292, region_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt32(499220029, optionsRequestedPolicyVersion_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(138946292, region_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeInt32Size(
499220029, optionsRequestedPolicyVersion_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest other =
(com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest) obj;
if (hasOptionsRequestedPolicyVersion() != other.hasOptionsRequestedPolicyVersion())
return false;
if (hasOptionsRequestedPolicyVersion()) {
if (getOptionsRequestedPolicyVersion() != other.getOptionsRequestedPolicyVersion())
return false;
}
if (!getProject().equals(other.getProject())) return false;
if (!getRegion().equals(other.getRegion())) return false;
if (!getResource().equals(other.getResource())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasOptionsRequestedPolicyVersion()) {
hash = (37 * hash) + OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER;
hash = (53 * hash) + getOptionsRequestedPolicyVersion();
}
hash = (37 * hash) + PROJECT_FIELD_NUMBER;
hash = (53 * hash) + getProject().hashCode();
hash = (37 * hash) + REGION_FIELD_NUMBER;
hash = (53 * hash) + getRegion().hashCode();
hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getResource().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A request message for ServiceAttachments.GetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest)
com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyServiceAttachmentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyServiceAttachmentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest.class,
com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest.Builder.class);
}
// Construct using com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
optionsRequestedPolicyVersion_ = 0;
project_ = "";
region_ = "";
resource_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyServiceAttachmentRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest
getDefaultInstanceForType() {
return com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest build() {
com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest buildPartial() {
com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest result =
new com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.optionsRequestedPolicyVersion_ = optionsRequestedPolicyVersion_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.project_ = project_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.region_ = region_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.resource_ = resource_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest) {
return mergeFrom((com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest other) {
if (other
== com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest.getDefaultInstance())
return this;
if (other.hasOptionsRequestedPolicyVersion()) {
setOptionsRequestedPolicyVersion(other.getOptionsRequestedPolicyVersion());
}
if (!other.getProject().isEmpty()) {
project_ = other.project_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getRegion().isEmpty()) {
region_ = other.region_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getResource().isEmpty()) {
resource_ = other.resource_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 1111570338:
{
region_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 1111570338
case 1566449778:
{
resource_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 1566449778
case 1820481738:
{
project_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 1820481738
case -301207064:
{
optionsRequestedPolicyVersion_ = input.readInt32();
bitField0_ |= 0x00000001;
break;
} // case -301207064
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int optionsRequestedPolicyVersion_;
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return Whether the optionsRequestedPolicyVersion field is set.
*/
@java.lang.Override
public boolean hasOptionsRequestedPolicyVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return The optionsRequestedPolicyVersion.
*/
@java.lang.Override
public int getOptionsRequestedPolicyVersion() {
return optionsRequestedPolicyVersion_;
}
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @param value The optionsRequestedPolicyVersion to set.
* @return This builder for chaining.
*/
public Builder setOptionsRequestedPolicyVersion(int value) {
optionsRequestedPolicyVersion_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return This builder for chaining.
*/
public Builder clearOptionsRequestedPolicyVersion() {
bitField0_ = (bitField0_ & ~0x00000001);
optionsRequestedPolicyVersion_ = 0;
onChanged();
return this;
}
private java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The project to set.
* @return This builder for chaining.
*/
public Builder setProject(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearProject() {
project_ = getDefaultInstance().getProject();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for project to set.
* @return This builder for chaining.
*/
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object region_ = "";
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The region.
*/
public java.lang.String getRegion() {
java.lang.Object ref = region_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
region_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for region.
*/
public com.google.protobuf.ByteString getRegionBytes() {
java.lang.Object ref = region_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
region_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The region to set.
* @return This builder for chaining.
*/
public Builder setRegion(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
region_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearRegion() {
region_ = getDefaultInstance().getRegion();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for region to set.
* @return This builder for chaining.
*/
public Builder setRegionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
region_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The resource to set.
* @return This builder for chaining.
*/
public Builder setResource(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearResource() {
resource_ = getDefaultInstance().getResource();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for resource to set.
* @return This builder for chaining.
*/
public Builder setResourceBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
resource_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest)
private static final com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest();
}
public static com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<GetIamPolicyServiceAttachmentRequest> PARSER =
new com.google.protobuf.AbstractParser<GetIamPolicyServiceAttachmentRequest>() {
@java.lang.Override
public GetIamPolicyServiceAttachmentRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<GetIamPolicyServiceAttachmentRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GetIamPolicyServiceAttachmentRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyServiceAttachmentRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
oracle/graal | 36,136 | truffle/src/com.oracle.truffle.api.bytecode.test/src/com/oracle/truffle/api/bytecode/test/InstrumentationTest.java | /*
* Copyright (c) 2023, 2024, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oracle.truffle.api.bytecode.test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import org.graalvm.polyglot.Context;
import org.junit.After;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.ContextThreadLocal;
import com.oracle.truffle.api.RootCallTarget;
import com.oracle.truffle.api.TruffleLanguage;
import com.oracle.truffle.api.bytecode.BytecodeConfig;
import com.oracle.truffle.api.bytecode.BytecodeLocal;
import com.oracle.truffle.api.bytecode.BytecodeLocation;
import com.oracle.truffle.api.bytecode.BytecodeParser;
import com.oracle.truffle.api.bytecode.BytecodeRootNode;
import com.oracle.truffle.api.bytecode.BytecodeRootNodes;
import com.oracle.truffle.api.bytecode.GenerateBytecode;
import com.oracle.truffle.api.bytecode.Instrumentation;
import com.oracle.truffle.api.bytecode.Operation;
import com.oracle.truffle.api.bytecode.Variadic;
import com.oracle.truffle.api.bytecode.test.InstrumentationTest.InstrumentationTestRootNode.InstrumentationDecrement;
import com.oracle.truffle.api.bytecode.test.InstrumentationTest.InstrumentationTestRootNode.PointInstrumentation1;
import com.oracle.truffle.api.bytecode.test.InstrumentationTest.InstrumentationTestRootNode.PointInstrumentation2;
import com.oracle.truffle.api.bytecode.test.InstrumentationTest.InstrumentationTestRootNode.PointInstrumentationRecursive1;
import com.oracle.truffle.api.bytecode.test.InstrumentationTest.InstrumentationTestRootNode.PointInstrumentationRecursive2;
import com.oracle.truffle.api.bytecode.test.error_tests.ExpectError;
import com.oracle.truffle.api.dsl.Bind;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.FrameDescriptor;
import com.oracle.truffle.api.instrumentation.ProvidedTags;
import com.oracle.truffle.api.instrumentation.StandardTags;
import com.oracle.truffle.api.nodes.DirectCallNode;
public class InstrumentationTest extends AbstractInstructionTest {
/**
 * Builds a {@link BytecodeRootNodes} unit from the given parser (with sources enabled) and
 * returns the most recently created root node.
 */
private static InstrumentationTestRootNode parse(BytecodeParser<InstrumentationTestRootNodeGen.Builder> parser) {
    BytecodeRootNodes<InstrumentationTestRootNode> rootNodes = InstrumentationTestRootNodeGen.create(
                    BytecodeInstrumentationTestLanguage.REF.get(null), BytecodeConfig.WITH_SOURCE, parser);
    List<InstrumentationTestRootNode> parsed = rootNodes.getNodes();
    return parsed.get(parsed.size() - 1);
}
// Polyglot context created, initialized and entered for each test.
Context context;

@Before
public void setup() {
    context = Context.create(BytecodeInstrumentationTestLanguage.ID);
    context.initialize(BytecodeInstrumentationTestLanguage.ID);
    // Enter so that language/thread-local lookups performed by the operations work.
    context.enter();
}

@After
public void tearDown() {
    context.close();
}
@Test
public void testPointInstrumentation1() {
    InstrumentationTestRootNode node = parse((b) -> {
        b.beginRoot();
        // Not yet enabled: this occurrence must not record an event.
        b.emitPointInstrumentation1();
        b.beginRunAsserts();
        b.emitLoadConstant((Consumer<ThreadLocalData>) (d) -> {
            assertTrue(d.events.isEmpty());
        });
        b.endRunAsserts();
        // Enable PointInstrumentation1 by updating the root nodes while executing.
        b.beginEnableInstrumentation();
        b.emitLoadConstant(PointInstrumentation1.class);
        b.endEnableInstrumentation();
        // Now enabled: exactly one event is expected.
        b.emitPointInstrumentation1();
        b.beginRunAsserts();
        b.emitLoadConstant((Consumer<ThreadLocalData>) (d) -> {
            assertEquals(1, d.events.size());
            assertEquals(PointInstrumentation1.class, d.events.get(0));
        });
        b.endRunAsserts();
        b.beginReturn();
        b.emitLoadConstant(42);
        b.endReturn();
        b.endRoot();
    });
    assertEquals(42, node.getCallTarget().call());
}
@Test
public void testPointInstrumentation2() {
    InstrumentationTestRootNode node = parse((b) -> {
        b.beginRoot();
        // Neither instrumentation enabled yet: no events recorded.
        b.emitPointInstrumentation1();
        b.emitPointInstrumentation2();
        b.beginRunAsserts();
        b.emitLoadConstant((Consumer<ThreadLocalData>) (d) -> {
            assertTrue(d.events.isEmpty());
        });
        b.endRunAsserts();
        // Enable only PointInstrumentation1; PointInstrumentation2 stays silent.
        b.beginEnableInstrumentation();
        b.emitLoadConstant(PointInstrumentation1.class);
        b.endEnableInstrumentation();
        b.emitPointInstrumentation1();
        b.emitPointInstrumentation2();
        b.beginRunAsserts();
        b.emitLoadConstant((Consumer<ThreadLocalData>) (d) -> {
            assertEquals(1, d.events.size());
            assertEquals(PointInstrumentation1.class, d.events.get(0));
        });
        b.endRunAsserts();
        // Enable the second instrumentation as well; now both record events.
        b.beginEnableInstrumentation();
        b.emitLoadConstant(PointInstrumentation2.class);
        b.endEnableInstrumentation();
        b.emitPointInstrumentation1();
        b.emitPointInstrumentation2();
        b.beginRunAsserts();
        b.emitLoadConstant((Consumer<ThreadLocalData>) (d) -> {
            assertEquals(3, d.events.size());
            assertEquals(PointInstrumentation1.class, d.events.get(0));
            assertEquals(PointInstrumentation1.class, d.events.get(1));
            assertEquals(PointInstrumentation2.class, d.events.get(2));
        });
        b.endRunAsserts();
        b.beginReturn();
        b.emitLoadConstant(42);
        b.endReturn();
        b.endRoot();
    });
    assertEquals(42, node.getCallTarget().call());
}
/*
 * Tests behavior when instrumentations are enabled from within executing instrumentation
 * instructions, i.e. the bytecode is updated while it is being run.
 */
@Test
public void testPointInstrumentationRecursive() {
    InstrumentationTestRootNode node = parse((b) -> {
        b.beginRoot();
        b.emitPointInstrumentation1();
        b.beginEnableInstrumentation();
        b.emitLoadConstant(PointInstrumentationRecursive1.class);
        b.endEnableInstrumentation();
        // Records its own event and, as a side effect, enables PointInstrumentation1.
        b.emitPointInstrumentationRecursive1();
        b.emitPointInstrumentation1();
        b.beginRunAsserts();
        b.emitLoadConstant((Consumer<ThreadLocalData>) (d) -> {
            assertEquals(2, d.events.size());
            assertEquals(PointInstrumentationRecursive1.class, d.events.get(0));
            assertEquals(PointInstrumentation1.class, d.events.get(1));
        });
        b.endRunAsserts();
        b.beginEnableInstrumentation();
        b.emitLoadConstant(PointInstrumentationRecursive2.class);
        b.endEnableInstrumentation();
        b.emitPointInstrumentationRecursive2();
        // This bytecode should be skipped (PointInstrumentation2 is not yet enabled here).
        b.emitPointInstrumentation2();
        // The second invocation triggers enabling of PointInstrumentation2.
        b.emitPointInstrumentationRecursive2();
        // After the bytecode transition execution should continue here; the interpreter
        // must remember which instrumentation instruction triggered the transition.
        b.emitPointInstrumentation2();
        b.emitPointInstrumentationRecursive1();
        b.emitPointInstrumentation1();
        b.beginRunAsserts();
        b.emitLoadConstant((Consumer<ThreadLocalData>) (d) -> {
            assertEquals(7, d.events.size());
            assertEquals(PointInstrumentationRecursive1.class, d.events.get(0));
            assertEquals(PointInstrumentation1.class, d.events.get(1));
            assertEquals(PointInstrumentationRecursive2.class, d.events.get(2));
            assertEquals(PointInstrumentationRecursive2.class, d.events.get(3));
            assertEquals(PointInstrumentation2.class, d.events.get(4));
            assertEquals(PointInstrumentationRecursive1.class, d.events.get(5));
            assertEquals(PointInstrumentation1.class, d.events.get(6));
        });
        b.endRunAsserts();
        b.beginReturn();
        b.emitLoadConstant(42);
        b.endReturn();
        b.endRoot();
    });
    assertEquals(42, node.getCallTarget().call());
}
/*
* Verifies that boxing elimination does not crash when instrumentation is changed while
* executing quickened instructions.
*/
@Test
public void testBoxingElimination() {
    InstrumentationTestRootNode node = parse((b) -> {
        b.beginRoot();
        // Counts the local l down from 6 to 0 in a while loop.
        BytecodeLocal l = b.createLocal();
        b.beginStoreLocal(l);
        b.emitLoadConstant(6);
        b.endStoreLocal();
        b.beginWhile();
        b.beginIsNot();
        b.emitLoadLocal(l);
        b.emitLoadConstant(0);
        b.endIsNot();
        b.beginBlock();
        b.beginStoreLocal(l);
        b.beginBlock();
        b.beginInstrumentationDecrement();
        // Enabling the instrumentation with values on the stack is not straightforward;
        // the easiest way is to enable it as a side effect of a stackful operation.
        // DecrementEnableInstrumentationIf4 enables InstrumentationDecrement when it sees 4,
        // after which every iteration decrements twice (hence operands 3 and 1 below).
        b.beginDecrementEnableInstrumentationIf4();
        b.emitLoadLocal(l);
        b.endDecrementEnableInstrumentationIf4();
        b.endInstrumentationDecrement();
        b.endBlock();
        b.endStoreLocal();
        b.endBlock();
        b.endWhile();
        b.beginRunAsserts();
        b.emitLoadConstant((Consumer<ThreadLocalData>) (d) -> {
            assertEquals(2, d.events.size());
            assertEquals(InstrumentationDecrement.class, d.events.get(0));
            assertEquals(3, d.operands.get(0));
            assertEquals(InstrumentationDecrement.class, d.events.get(1));
            assertEquals(1, d.operands.get(1));
        });
        b.endRunAsserts();
        b.beginReturn();
        b.emitLoadLocal(l);
        b.endReturn();
        b.endRoot();
    });
    // Force the cached (quickening) interpreter immediately.
    node.getBytecodeNode().setUncachedThreshold(0);
    assertEquals(0, node.getCallTarget().call());
}
@Test
public void testCachedTagsPreservedInInstrumentation() {
    InstrumentationTestRootNode node = parse((b) -> {
        b.beginRoot();
        BytecodeLocal l = b.createLocal();
        b.beginStoreLocal(l);
        b.emitLoadConstant(6);
        b.endStoreLocal();
        // Enabling an instrumentation updates the bytecode mid-execution; the quickened
        // load.local$Int instruction must survive that update.
        b.beginEnableInstrumentation();
        b.emitLoadConstant(PointInstrumentation1.class);
        b.endEnableInstrumentation();
        b.beginReturn();
        b.emitLoadLocal(l);
        b.endReturn();
        b.endRoot();
    });
    node.getBytecodeNode().setUncachedThreshold(0);
    assertEquals(6, node.getCallTarget().call());
    // Instruction at index 4 stays quickened to the int variant across both calls.
    assertEquals("load.local$Int", node.getBytecodeNode().getInstructionsAsList().get(4).getName());
    assertEquals(6, node.getCallTarget().call());
    assertEquals("load.local$Int", node.getBytecodeNode().getInstructionsAsList().get(4).getName());
}
@Test
public void testCachedTagsPreservedInInstrumentationWithSplitting() {
    InstrumentationTestRootNode node = parse((b) -> {
        b.beginRoot();
        BytecodeLocal l = b.createLocal();
        b.beginStoreLocal(l);
        b.emitLoadConstant(6);
        b.endStoreLocal();
        b.beginEnableInstrumentation();
        b.emitLoadConstant(PointInstrumentation1.class);
        b.endEnableInstrumentation();
        b.beginReturn();
        b.emitLoadLocal(l);
        b.endReturn();
        b.endRoot();
    });
    node.getBytecodeNode().setUncachedThreshold(0);
    DirectCallNode cn = DirectCallNode.create(node.getCallTarget());
    // Cloning (splitting) of call targets is only supported in optimizing runtimes;
    // skip the test elsewhere.
    Assume.assumeTrue(cn.cloneCallTarget());
    RootCallTarget clone = (RootCallTarget) cn.getClonedCallTarget();
    InstrumentationTestRootNode clonedNode = (InstrumentationTestRootNode) clone.getRootNode();
    clonedNode.getBytecodeNode().setUncachedThreshold(0);
    assertEquals(6, clone.call());
    // The split copy must also keep its quickened load.local$Int instruction.
    assertEquals("load.local$Int", clonedNode.getBytecodeNode().getInstructionsAsList().get(4).getName());
    assertEquals(6, clone.call());
    assertEquals("load.local$Int", clonedNode.getBytecodeNode().getInstructionsAsList().get(4).getName());
}
@GenerateBytecode(languageClass = BytecodeInstrumentationTestLanguage.class, //
                enableQuickening = true, //
                enableUncachedInterpreter = true, //
                boxingEliminationTypes = {int.class})
public abstract static class InstrumentationTestRootNode extends DebugBytecodeRootNode implements BytecodeRootNode {

    protected InstrumentationTestRootNode(BytecodeInstrumentationTestLanguage language,
                    FrameDescriptor frameDescriptor) {
        super(language, frameDescriptor);
    }

    /** Enables the given instrumentation class on this node's root nodes while executing. */
    @Operation
    static final class EnableInstrumentation {
        @Specialization
        @TruffleBoundary
        public static void doDefault(Class<?> instrumentationClass,
                        @Bind BytecodeLocation location) {
            location.getBytecodeNode().getBytecodeRootNode().getRootNodes().update(InstrumentationTestRootNodeGen.newConfigBuilder().addInstrumentation(instrumentationClass).build());
        }
    }

    /** Runs the given consumer against the language's thread-local event log. */
    @Operation
    static final class RunAsserts {
        @SuppressWarnings("unchecked")
        @Specialization
        @TruffleBoundary
        public static void doDefault(Consumer<?> consumer,
                        @Bind InstrumentationTestRootNode root) {
            ((Consumer<ThreadLocalData>) consumer).accept(root.getLanguage(BytecodeInstrumentationTestLanguage.class).threadLocal.get());
        }
    }

    /** Point instrumentation that records an event each time it is executed. */
    @Instrumentation
    static final class PointInstrumentation1 {
        @Specialization
        public static void doDefault(@Bind InstrumentationTestRootNode root) {
            root.getLanguage(BytecodeInstrumentationTestLanguage.class).threadLocal.get().add(PointInstrumentation1.class, null);
        }
    }

    /** Second point instrumentation, distinguished only by its recorded class. */
    @Instrumentation
    static final class PointInstrumentation2 {
        @Specialization
        public static void doDefault(@Bind InstrumentationTestRootNode root) {
            root.getLanguage(BytecodeInstrumentationTestLanguage.class).threadLocal.get().add(PointInstrumentation2.class, null);
        }
    }

    /** Records an event and recursively enables PointInstrumentation1 from within itself. */
    @Instrumentation
    static final class PointInstrumentationRecursive1 {
        @Specialization
        public static void doDefault(@Bind InstrumentationTestRootNode root) {
            root.getLanguage(BytecodeInstrumentationTestLanguage.class).threadLocal.get().add(PointInstrumentationRecursive1.class, null);
            root.getRootNodes().update(InstrumentationTestRootNodeGen.newConfigBuilder().addInstrumentation(PointInstrumentation1.class).build());
        }
    }

    /**
     * Records an event and enables PointInstrumentation2 from within itself, but only on the
     * second and subsequent executions (the thread-local counter starts at 1).
     */
    @Instrumentation
    static final class PointInstrumentationRecursive2 {
        @Specialization
        public static void doDefault(@Bind InstrumentationTestRootNode root) {
            ThreadLocalData tl = root.getLanguage(BytecodeInstrumentationTestLanguage.class).threadLocal.get();
            tl.add(PointInstrumentationRecursive2.class, null);
            if (tl.pointInstrumentationRecursive2Counter <= 0) {
                root.getRootNodes().update(InstrumentationTestRootNodeGen.newConfigBuilder().addInstrumentation(PointInstrumentation2.class).build());
            }
            tl.pointInstrumentationRecursive2Counter--;
        }
    }

    /** Operand instrumentation: records the operand and passes it through unchanged. */
    @Instrumentation
    static final class InstrumentationOperandReturn {
        @Specialization
        public static Object doDefault(Object operand, @Bind InstrumentationTestRootNode root) {
            root.getLanguage(BytecodeInstrumentationTestLanguage.class).threadLocal.get().add(InstrumentationOperandReturn.class, operand);
            return operand;
        }
    }

    /**
     * Decrements the operand; as a side effect, enables InstrumentationDecrement when the
     * operand equals 4 (used to flip instrumentation with values on the stack).
     */
    @Operation
    static final class DecrementEnableInstrumentationIf4 {
        @Specialization
        public static int doInt(int operand, @Bind InstrumentationTestRootNode root) {
            if (operand == 4) {
                root.getRootNodes().update(InstrumentationTestRootNodeGen.newConfigBuilder().addInstrumentation(InstrumentationDecrement.class).build());
            }
            return operand - 1;
        }
    }

    /** Inequality comparison on ints. */
    @Operation
    static final class IsNot {
        @Specialization
        public static boolean doInt(int operand, int value) {
            return operand != value;
        }
    }

    /** Equality comparison on ints. */
    @Operation
    static final class Is {
        @Specialization
        public static boolean doInt(int operand, int value) {
            return operand == value;
        }
    }

    /** Operand instrumentation: records the int operand and returns it decremented. */
    @Instrumentation
    static final class InstrumentationDecrement {
        @Specialization
        public static int doInt(int operand, @Bind InstrumentationTestRootNode root) {
            root.getLanguage(BytecodeInstrumentationTestLanguage.class).threadLocal.get().add(InstrumentationDecrement.class, operand);
            return operand - 1;
        }
    }
}
/** Per-thread event log populated by the instrumentation operations above. */
static class ThreadLocalData {
    // Parallel lists: events.get(i) identifies the instrumentation, operands.get(i) is the
    // value it observed (null for point instrumentations).
    final List<Class<?>> events = new ArrayList<>();
    final List<Object> operands = new ArrayList<>();
    // Countdown used by PointInstrumentationRecursive2; enables PointInstrumentation2 once
    // it reaches zero.
    private int pointInstrumentationRecursive2Counter = 1;

    @TruffleBoundary
    void add(Class<?> c, Object operand) {
        events.add(c);
        operands.add(operand);
    }
}
/** Minimal test language that carries the per-thread event log in a context-thread local. */
@TruffleLanguage.Registration(id = BytecodeInstrumentationTestLanguage.ID)
@ProvidedTags(StandardTags.ExpressionTag.class)
public static class BytecodeInstrumentationTestLanguage extends TruffleLanguage<Object> {
    public static final String ID = "bytecode_BytecodeInstrumentationTestLanguage";

    // Event log shared between the executing operations and the test assertions.
    final ContextThreadLocal<ThreadLocalData> threadLocal = this.locals.createContextThreadLocal((c, t) -> new ThreadLocalData());

    @Override
    protected Object createContext(Env env) {
        // No per-context state is needed beyond the thread local above.
        return new Object();
    }

    static final LanguageReference<BytecodeInstrumentationTestLanguage> REF = LanguageReference.create(BytecodeInstrumentationTestLanguage.class);
}
/**
 * Annotation-processor fixture: checks the compile-time validation rules for
 * {@code @Instrumentation} operations via {@code @ExpectError}.
 */
@GenerateBytecode(languageClass = BytecodeInstrumentationTestLanguage.class)
public abstract static class InstrumentationErrorRootNode1 extends DebugBytecodeRootNode implements BytecodeRootNode {
    protected InstrumentationErrorRootNode1(BytecodeInstrumentationTestLanguage language, FrameDescriptor frameDescriptor) {
        super(language, frameDescriptor);
    }

    @Operation
    static final class Identity {
        @Specialization
        public static Object doDefault(Object operand) {
            return operand;
        }
    }

    // Expected to generate without error: void with no dynamic operand.
    @Instrumentation
    static final class ValidInstrumentation1 {
        @Specialization
        public static void doInt() {
        }
    }

    // Expected to generate without error: single dynamic operand passed through.
    @Instrumentation
    static final class ValidInstrumentation2 {
        @Specialization
        public static Object doInt(Object arg) {
            return arg;
        }
    }

    @ExpectError("An @Instrumentation operation cannot have more than one dynamic operand. " +
                    "Instrumentations must have transparent stack effects. " + //
                    "Remove the additional operands to resolve this.")
    @Instrumentation
    static final class InvalidInstrumentation1 {
        @Specialization
        public static boolean doInt(int operand, int value) {
            return operand == value;
        }
    }

    @ExpectError("An @Instrumentation operation cannot have a return value without also specifying a single dynamic operand. " + //
                    "Instrumentations must have transparent stack effects. " + //
                    "Use void as the return type or specify a single dynamic operand value to resolve this.")
    @Instrumentation
    static final class InvalidInstrumentation2 {
        @Specialization
        public static int doInt() {
            return 42;
        }
    }

    @ExpectError("An @Instrumentation operation cannot use @Variadic for its dynamic operand. " + //
                    "Instrumentations must have transparent stack effects. Remove the variadic annotation to resolve this.")
    @Instrumentation
    static final class InvalidInstrumentation3 {
        @Specialization
        public static int doInt(@SuppressWarnings("unused") @Variadic Object... args) {
            return 42;
        }
    }

    @ExpectError("An @Instrumentation operation cannot be void and also specify a dynamic operand. " + //
                    "Instrumentations must have transparent stack effects. " +
                    "Change the return type or remove the dynamic operand to resolve this.")
    @Instrumentation
    static final class InvalidInstrumentation4 {
        @Specialization
        public static void doInt(@SuppressWarnings("unused") Object arg) {
            return;
        }
    }
}
/**
 * Annotation-processor fixture declaring 31 {@code @Instrumentation} operations together with
 * tag instrumentation; expected to generate without error (contrast with
 * {@code TooManyInstrumentationsRootNode}, which adds one more and is expected to fail).
 */
@GenerateBytecode(languageClass = BytecodeInstrumentationTestLanguage.class, //
                enableTagInstrumentation = true, //
                enableRootBodyTagging = false, enableRootTagging = false)
public abstract static class ManyInstrumentationsRootNode extends DebugBytecodeRootNode implements BytecodeRootNode {
    protected ManyInstrumentationsRootNode(BytecodeInstrumentationTestLanguage language, FrameDescriptor frameDescriptor) {
        super(language, frameDescriptor);
    }

    @Operation
    static final class Is {
        @Specialization
        public static boolean doInt(int operand, int value) {
            return operand == value;
        }
    }

    @Instrumentation
    static final class Instrumentation1 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation2 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation3 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation4 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation5 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation6 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation7 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation8 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation9 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation10 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation11 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation12 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation13 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation14 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation15 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation16 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation17 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation18 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation19 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation20 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation21 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation22 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation23 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation24 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation25 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation26 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation27 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation28 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation29 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation30 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation31 {
        @Specialization
        public static void doDefault() {
        }
    }
}
/**
 * Same fixture as above but with 32 {@code @Instrumentation} operations: together with tag
 * instrumentation this exceeds the supported count, so generation must fail with the
 * {@code @ExpectError} message below.
 */
@ExpectError("Too many @Instrumentation annotated operations specified. %")
@GenerateBytecode(languageClass = BytecodeInstrumentationTestLanguage.class, //
                enableTagInstrumentation = true, //
                enableRootBodyTagging = false, enableRootTagging = false)
public abstract static class TooManyInstrumentationsRootNode extends DebugBytecodeRootNode implements BytecodeRootNode {
    protected TooManyInstrumentationsRootNode(BytecodeInstrumentationTestLanguage language, FrameDescriptor frameDescriptor) {
        super(language, frameDescriptor);
    }

    @Operation
    static final class Is {
        @Specialization
        public static boolean doInt(int operand, int value) {
            return operand == value;
        }
    }

    @Instrumentation
    static final class Instrumentation1 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation2 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation3 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation4 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation5 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation6 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation7 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation8 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation9 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation10 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation11 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation12 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation13 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation14 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation15 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation16 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation17 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation18 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation19 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation20 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation21 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation22 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation23 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation24 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation25 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation26 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation27 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation28 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation29 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation30 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation31 {
        @Specialization
        public static void doDefault() {
        }
    }

    @Instrumentation
    static final class Instrumentation32 {
        @Specialization
        public static void doDefault() {
        }
    }
}
}
|
google/ExoPlayer | 36,322 | library/ui/src/main/java/com/google/android/exoplayer2/ui/DefaultTimeBar.java | /*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.ui;
import android.animation.ValueAnimator;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewParent;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityNodeInfo;
import android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction;
import androidx.annotation.ColorInt;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import java.util.Collections;
import java.util.Formatter;
import java.util.Locale;
import java.util.concurrent.CopyOnWriteArraySet;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* A time bar that shows a current position, buffered position, duration and ad markers.
*
* <p>A DefaultTimeBar can be customized by setting attributes, as outlined below.
*
* <h2>Attributes</h2>
*
* The following attributes can be set on a DefaultTimeBar when used in a layout XML file:
*
* <ul>
* <li><b>{@code bar_height}</b> - Dimension for the height of the time bar.
* <ul>
* <li>Default: {@link #DEFAULT_BAR_HEIGHT_DP}
* </ul>
* <li><b>{@code touch_target_height}</b> - Dimension for the height of the area in which touch
* interactions with the time bar are handled. If no height is specified, this also determines
* the height of the view.
* <ul>
* <li>Default: {@link #DEFAULT_TOUCH_TARGET_HEIGHT_DP}
* </ul>
* <li><b>{@code ad_marker_width}</b> - Dimension for the width of any ad markers shown on the
* bar. Ad markers are superimposed on the time bar to show the times at which ads will play.
* <ul>
* <li>Default: {@link #DEFAULT_AD_MARKER_WIDTH_DP}
* </ul>
* <li><b>{@code scrubber_enabled_size}</b> - Dimension for the diameter of the circular scrubber
* handle when scrubbing is enabled but not in progress. Set to zero if no scrubber handle
* should be shown.
* <ul>
* <li>Default: {@link #DEFAULT_SCRUBBER_ENABLED_SIZE_DP}
* </ul>
* <li><b>{@code scrubber_disabled_size}</b> - Dimension for the diameter of the circular scrubber
* handle when scrubbing isn't enabled. Set to zero if no scrubber handle should be shown.
* <ul>
* <li>Default: {@link #DEFAULT_SCRUBBER_DISABLED_SIZE_DP}
* </ul>
* <li><b>{@code scrubber_dragged_size}</b> - Dimension for the diameter of the circular scrubber
* handle when scrubbing is in progress. Set to zero if no scrubber handle should be shown.
* <ul>
* <li>Default: {@link #DEFAULT_SCRUBBER_DRAGGED_SIZE_DP}
* </ul>
* <li><b>{@code scrubber_drawable}</b> - Optional reference to a drawable to draw for the
* scrubber handle. If set, this overrides the default behavior, which is to draw a circle for
* the scrubber handle.
* <li><b>{@code played_color}</b> - Color for the portion of the time bar representing media
* before the current playback position.
* <ul>
* <li>Corresponding method: {@link #setPlayedColor(int)}
* <li>Default: {@link #DEFAULT_PLAYED_COLOR}
* </ul>
* <li><b>{@code scrubber_color}</b> - Color for the scrubber handle.
* <ul>
* <li>Corresponding method: {@link #setScrubberColor(int)}
* <li>Default: {@link #DEFAULT_SCRUBBER_COLOR}
* </ul>
* <li><b>{@code buffered_color}</b> - Color for the portion of the time bar after the current
* played position up to the current buffered position.
* <ul>
* <li>Corresponding method: {@link #setBufferedColor(int)}
* <li>Default: {@link #DEFAULT_BUFFERED_COLOR}
* </ul>
* <li><b>{@code unplayed_color}</b> - Color for the portion of the time bar after the current
* buffered position.
* <ul>
* <li>Corresponding method: {@link #setUnplayedColor(int)}
* <li>Default: {@link #DEFAULT_UNPLAYED_COLOR}
* </ul>
* <li><b>{@code ad_marker_color}</b> - Color for unplayed ad markers.
* <ul>
* <li>Corresponding method: {@link #setAdMarkerColor(int)}
* <li>Default: {@link #DEFAULT_AD_MARKER_COLOR}
* </ul>
* <li><b>{@code played_ad_marker_color}</b> - Color for played ad markers.
* <ul>
* <li>Corresponding method: {@link #setPlayedAdMarkerColor(int)}
* <li>Default: {@link #DEFAULT_PLAYED_AD_MARKER_COLOR}
* </ul>
* </ul>
*
* @deprecated com.google.android.exoplayer2 is deprecated. Please migrate to androidx.media3 (which
* contains the same ExoPlayer code). See <a
* href="https://developer.android.com/guide/topics/media/media3/getting-started/migration-guide">the
* migration guide</a> for more details, including a script to help with the migration.
*/
@Deprecated
public class DefaultTimeBar extends View implements TimeBar {
/** Default height for the time bar, in dp. */
public static final int DEFAULT_BAR_HEIGHT_DP = 4;
/** Default height for the touch target, in dp. */
public static final int DEFAULT_TOUCH_TARGET_HEIGHT_DP = 26;
  /** Default width for ad markers, in dp. */
  public static final int DEFAULT_AD_MARKER_WIDTH_DP = 4;
  /** Default diameter for the scrubber when enabled, in dp. */
  public static final int DEFAULT_SCRUBBER_ENABLED_SIZE_DP = 12;
  /** Default diameter for the scrubber when disabled, in dp. */
  public static final int DEFAULT_SCRUBBER_DISABLED_SIZE_DP = 0;
  /** Default diameter for the scrubber when dragged, in dp. */
  public static final int DEFAULT_SCRUBBER_DRAGGED_SIZE_DP = 16;
  /** Default color for the played portion of the time bar. */
  public static final int DEFAULT_PLAYED_COLOR = 0xFFFFFFFF;
  /** Default color for the unplayed portion of the time bar. */
  public static final int DEFAULT_UNPLAYED_COLOR = 0x33FFFFFF;
  /** Default color for the buffered portion of the time bar. */
  public static final int DEFAULT_BUFFERED_COLOR = 0xCCFFFFFF;
  /** Default color for the scrubber handle. */
  public static final int DEFAULT_SCRUBBER_COLOR = 0xFFFFFFFF;
  /** Default color for ad markers. */
  public static final int DEFAULT_AD_MARKER_COLOR = 0xB2FFFF00;
  /** Default color for played ad markers. */
  public static final int DEFAULT_PLAYED_AD_MARKER_COLOR = 0x33FFFF00;
  /** Vertical gravity for progress bar to be located at the center in the view. */
  public static final int BAR_GRAVITY_CENTER = 0;
  /** Vertical gravity for progress bar to be located at the bottom in the view. */
  public static final int BAR_GRAVITY_BOTTOM = 1;
  /** The threshold in dps above the bar at which touch events trigger fine scrub mode. */
  private static final int FINE_SCRUB_Y_THRESHOLD_DP = -50;
  /** The ratio by which times are reduced in fine scrub mode. */
  private static final int FINE_SCRUB_RATIO = 3;
  /**
   * The time after which the scrubbing listener is notified that scrubbing has stopped after
   * performing an incremental scrub using key input.
   */
  private static final long STOP_SCRUBBING_TIMEOUT_MS = 1000;
  // Default number of steps the bar is divided into for key-based (DPAD) scrubbing.
  private static final int DEFAULT_INCREMENT_COUNT = 20;
  // Scrubber scale factors used by the show/hide animations.
  private static final float SHOWN_SCRUBBER_SCALE = 1.0f;
  private static final float HIDDEN_SCRUBBER_SCALE = 0.0f;
  /**
   * The name of the Android SDK view that most closely resembles this custom view. Used as the
   * class name for accessibility.
   */
  private static final String ACCESSIBILITY_CLASS_NAME = "android.widget.SeekBar";
  // Reusable geometry, mutated in onLayout()/update() so the draw path allocates nothing.
  private final Rect seekBounds;
  private final Rect progressBar;
  private final Rect bufferedBar;
  private final Rect scrubberBar;
  // One paint per visual element; colors assigned in the constructor or via the setters below.
  private final Paint playedPaint;
  private final Paint bufferedPaint;
  private final Paint unplayedPaint;
  private final Paint adMarkerPaint;
  private final Paint playedAdMarkerPaint;
  private final Paint scrubberPaint;
  // Optional custom scrubber; when null a circle is drawn with scrubberPaint instead.
  @Nullable private final Drawable scrubberDrawable;
  // Dimensions resolved from styled attributes (or defaults), in px.
  private final int barHeight;
  private final int touchTargetHeight;
  private final int barGravity;
  private final int adMarkerWidth;
  private final int scrubberEnabledSize;
  private final int scrubberDisabledSize;
  private final int scrubberDraggedSize;
  private final int scrubberPadding;
  // Negative px threshold for fine-scrub mode (above the top of the view).
  private final int fineScrubYThreshold;
  // Shared builder/formatter used to produce the accessibility time string.
  private final StringBuilder formatBuilder;
  private final Formatter formatter;
  private final Runnable stopScrubbingRunnable;
  private final CopyOnWriteArraySet<OnScrubListener> listeners;
  // Reused for resolving touch coordinates (avoids per-event allocation).
  private final Point touchPosition;
  private final float density;
  // Key-based scrubbing configuration; exactly one of count/time increment is active.
  private int keyCountIncrement;
  private long keyTimeIncrement;
  private int lastCoarseScrubXPosition;
  private @MonotonicNonNull Rect lastExclusionRectangle;
  // Animates scrubberScale between hidden (0) and shown (1).
  private ValueAnimator scrubberScalingAnimator;
  private float scrubberScale;
  private boolean scrubberPaddingDisabled;
  // Current interaction/playback state.
  private boolean scrubbing;
  private long scrubPosition;
  private long duration;
  private long position;
  private long bufferedPosition;
  // Ad marker data; arrays are non-null whenever adGroupCount > 0.
  private int adGroupCount;
  @Nullable private long[] adGroupTimesMs;
  @Nullable private boolean[] playedAdGroups;
  /** Creates a new time bar with default attributes. */
  public DefaultTimeBar(Context context) {
    this(context, null);
  }
  /** Creates a new time bar, reading attributes from {@code attrs}. */
  public DefaultTimeBar(Context context, @Nullable AttributeSet attrs) {
    this(context, attrs, 0);
  }
  /**
   * Creates a new time bar. The view's own {@code attrs} are also used as the time bar styled
   * attributes.
   */
  public DefaultTimeBar(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
    this(context, attrs, defStyleAttr, attrs);
  }
  /**
   * Creates a new time bar.
   *
   * @param timebarAttrs Attribute set from which time-bar-specific attributes are read; may differ
   *     from {@code attrs} when this view's attributes are forwarded by a parent control.
   */
  public DefaultTimeBar(
      Context context,
      @Nullable AttributeSet attrs,
      int defStyleAttr,
      @Nullable AttributeSet timebarAttrs) {
    this(context, attrs, defStyleAttr, timebarAttrs, 0);
  }
  // Suppress warnings due to usage of View methods in the constructor.
  @SuppressWarnings("nullness:method.invocation")
  public DefaultTimeBar(
      Context context,
      @Nullable AttributeSet attrs,
      int defStyleAttr,
      @Nullable AttributeSet timebarAttrs,
      int defStyleRes) {
    super(context, attrs, defStyleAttr);
    // Allocate all reusable drawing state up front so layout/draw paths allocate nothing.
    seekBounds = new Rect();
    progressBar = new Rect();
    bufferedBar = new Rect();
    scrubberBar = new Rect();
    playedPaint = new Paint();
    bufferedPaint = new Paint();
    unplayedPaint = new Paint();
    adMarkerPaint = new Paint();
    playedAdMarkerPaint = new Paint();
    scrubberPaint = new Paint();
    scrubberPaint.setAntiAlias(true);
    listeners = new CopyOnWriteArraySet<>();
    touchPosition = new Point();
    // Calculate the dimensions and paints for drawn elements.
    Resources res = context.getResources();
    DisplayMetrics displayMetrics = res.getDisplayMetrics();
    density = displayMetrics.density;
    // FINE_SCRUB_Y_THRESHOLD_DP is negative, so this threshold lies above the view's top edge.
    fineScrubYThreshold = dpToPx(density, FINE_SCRUB_Y_THRESHOLD_DP);
    int defaultBarHeight = dpToPx(density, DEFAULT_BAR_HEIGHT_DP);
    int defaultTouchTargetHeight = dpToPx(density, DEFAULT_TOUCH_TARGET_HEIGHT_DP);
    int defaultAdMarkerWidth = dpToPx(density, DEFAULT_AD_MARKER_WIDTH_DP);
    int defaultScrubberEnabledSize = dpToPx(density, DEFAULT_SCRUBBER_ENABLED_SIZE_DP);
    int defaultScrubberDisabledSize = dpToPx(density, DEFAULT_SCRUBBER_DISABLED_SIZE_DP);
    int defaultScrubberDraggedSize = dpToPx(density, DEFAULT_SCRUBBER_DRAGGED_SIZE_DP);
    // Read styled attributes when provided, falling back to the defaults computed above.
    if (timebarAttrs != null) {
      TypedArray a =
          context
              .getTheme()
              .obtainStyledAttributes(
                  timebarAttrs, R.styleable.DefaultTimeBar, defStyleAttr, defStyleRes);
      try {
        scrubberDrawable = a.getDrawable(R.styleable.DefaultTimeBar_scrubber_drawable);
        if (scrubberDrawable != null) {
          setDrawableLayoutDirection(scrubberDrawable);
          // A custom scrubber drawable may require a taller touch target.
          defaultTouchTargetHeight =
              Math.max(scrubberDrawable.getMinimumHeight(), defaultTouchTargetHeight);
        }
        barHeight =
            a.getDimensionPixelSize(R.styleable.DefaultTimeBar_bar_height, defaultBarHeight);
        touchTargetHeight =
            a.getDimensionPixelSize(
                R.styleable.DefaultTimeBar_touch_target_height, defaultTouchTargetHeight);
        barGravity = a.getInt(R.styleable.DefaultTimeBar_bar_gravity, BAR_GRAVITY_CENTER);
        adMarkerWidth =
            a.getDimensionPixelSize(
                R.styleable.DefaultTimeBar_ad_marker_width, defaultAdMarkerWidth);
        scrubberEnabledSize =
            a.getDimensionPixelSize(
                R.styleable.DefaultTimeBar_scrubber_enabled_size, defaultScrubberEnabledSize);
        scrubberDisabledSize =
            a.getDimensionPixelSize(
                R.styleable.DefaultTimeBar_scrubber_disabled_size, defaultScrubberDisabledSize);
        scrubberDraggedSize =
            a.getDimensionPixelSize(
                R.styleable.DefaultTimeBar_scrubber_dragged_size, defaultScrubberDraggedSize);
        int playedColor = a.getInt(R.styleable.DefaultTimeBar_played_color, DEFAULT_PLAYED_COLOR);
        int scrubberColor =
            a.getInt(R.styleable.DefaultTimeBar_scrubber_color, DEFAULT_SCRUBBER_COLOR);
        int bufferedColor =
            a.getInt(R.styleable.DefaultTimeBar_buffered_color, DEFAULT_BUFFERED_COLOR);
        int unplayedColor =
            a.getInt(R.styleable.DefaultTimeBar_unplayed_color, DEFAULT_UNPLAYED_COLOR);
        int adMarkerColor =
            a.getInt(R.styleable.DefaultTimeBar_ad_marker_color, DEFAULT_AD_MARKER_COLOR);
        int playedAdMarkerColor =
            a.getInt(
                R.styleable.DefaultTimeBar_played_ad_marker_color, DEFAULT_PLAYED_AD_MARKER_COLOR);
        playedPaint.setColor(playedColor);
        scrubberPaint.setColor(scrubberColor);
        bufferedPaint.setColor(bufferedColor);
        unplayedPaint.setColor(unplayedColor);
        adMarkerPaint.setColor(adMarkerColor);
        playedAdMarkerPaint.setColor(playedAdMarkerColor);
      } finally {
        // TypedArrays are pooled; always recycle.
        a.recycle();
      }
    } else {
      barHeight = defaultBarHeight;
      touchTargetHeight = defaultTouchTargetHeight;
      barGravity = BAR_GRAVITY_CENTER;
      adMarkerWidth = defaultAdMarkerWidth;
      scrubberEnabledSize = defaultScrubberEnabledSize;
      scrubberDisabledSize = defaultScrubberDisabledSize;
      scrubberDraggedSize = defaultScrubberDraggedSize;
      playedPaint.setColor(DEFAULT_PLAYED_COLOR);
      scrubberPaint.setColor(DEFAULT_SCRUBBER_COLOR);
      bufferedPaint.setColor(DEFAULT_BUFFERED_COLOR);
      unplayedPaint.setColor(DEFAULT_UNPLAYED_COLOR);
      adMarkerPaint.setColor(DEFAULT_AD_MARKER_COLOR);
      playedAdMarkerPaint.setColor(DEFAULT_PLAYED_AD_MARKER_COLOR);
      scrubberDrawable = null;
    }
    formatBuilder = new StringBuilder();
    formatter = new Formatter(formatBuilder, Locale.getDefault());
    stopScrubbingRunnable = () -> stopScrubbing(/* canceled= */ false);
    // Reserve horizontal padding so the scrubber can be fully drawn at the extreme positions.
    if (scrubberDrawable != null) {
      scrubberPadding = (scrubberDrawable.getMinimumWidth() + 1) / 2;
    } else {
      scrubberPadding =
          (Math.max(scrubberDisabledSize, Math.max(scrubberEnabledSize, scrubberDraggedSize)) + 1)
              / 2;
    }
    scrubberScale = 1.0f;
    scrubberScalingAnimator = new ValueAnimator();
    // Only the seek bounds need redrawing while the scrubber scale animates.
    scrubberScalingAnimator.addUpdateListener(
        animation -> {
          scrubberScale = (float) animation.getAnimatedValue();
          invalidate(seekBounds);
        });
    duration = C.TIME_UNSET;
    keyTimeIncrement = C.TIME_UNSET;
    keyCountIncrement = DEFAULT_INCREMENT_COUNT;
    setFocusable(true);
    if (getImportantForAccessibility() == View.IMPORTANT_FOR_ACCESSIBILITY_AUTO) {
      setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_YES);
    }
  }
  /** Shows the scrubber handle immediately, without animation. */
  public void showScrubber() {
    // Cancel any in-flight show/hide animation before snapping to the final state.
    if (scrubberScalingAnimator.isStarted()) {
      scrubberScalingAnimator.cancel();
    }
    scrubberPaddingDisabled = false;
    scrubberScale = 1;
    invalidate(seekBounds);
  }
  /**
   * Shows the scrubber handle with animation.
   *
   * @param showAnimationDurationMs The duration for scrubber showing animation.
   */
  public void showScrubber(long showAnimationDurationMs) {
    if (scrubberScalingAnimator.isStarted()) {
      scrubberScalingAnimator.cancel();
    }
    scrubberPaddingDisabled = false;
    // Animate from the current scale so interrupted hide animations resume smoothly.
    scrubberScalingAnimator.setFloatValues(scrubberScale, SHOWN_SCRUBBER_SCALE);
    scrubberScalingAnimator.setDuration(showAnimationDurationMs);
    scrubberScalingAnimator.start();
  }
  /**
   * Hides the scrubber handle immediately, without animation.
   *
   * @param disableScrubberPadding Whether the scrubber padding should also be removed from the
   *     layout (see {@code onLayout}).
   */
  public void hideScrubber(boolean disableScrubberPadding) {
    if (scrubberScalingAnimator.isStarted()) {
      scrubberScalingAnimator.cancel();
    }
    scrubberPaddingDisabled = disableScrubberPadding;
    scrubberScale = 0;
    invalidate(seekBounds);
  }
  /**
   * Hides the scrubber handle with animation.
   *
   * @param hideAnimationDurationMs The duration for scrubber hiding animation.
   */
  public void hideScrubber(long hideAnimationDurationMs) {
    if (scrubberScalingAnimator.isStarted()) {
      scrubberScalingAnimator.cancel();
    }
    scrubberScalingAnimator.setFloatValues(scrubberScale, HIDDEN_SCRUBBER_SCALE);
    scrubberScalingAnimator.setDuration(hideAnimationDurationMs);
    scrubberScalingAnimator.start();
  }
  // Color setters. Each updates a paint and repaints only the seek-bounds region.
  /**
   * Sets the color for the portion of the time bar representing media before the playback position.
   *
   * @param playedColor The color for the portion of the time bar representing media before the
   *     playback position.
   */
  public void setPlayedColor(@ColorInt int playedColor) {
    playedPaint.setColor(playedColor);
    invalidate(seekBounds);
  }
  /**
   * Sets the color for the scrubber handle.
   *
   * @param scrubberColor The color for the scrubber handle.
   */
  public void setScrubberColor(@ColorInt int scrubberColor) {
    scrubberPaint.setColor(scrubberColor);
    invalidate(seekBounds);
  }
  /**
   * Sets the color for the portion of the time bar after the current played position up to the
   * current buffered position.
   *
   * @param bufferedColor The color for the portion of the time bar after the current played
   *     position up to the current buffered position.
   */
  public void setBufferedColor(@ColorInt int bufferedColor) {
    bufferedPaint.setColor(bufferedColor);
    invalidate(seekBounds);
  }
  /**
   * Sets the color for the portion of the time bar after the current played position.
   *
   * @param unplayedColor The color for the portion of the time bar after the current played
   *     position.
   */
  public void setUnplayedColor(@ColorInt int unplayedColor) {
    unplayedPaint.setColor(unplayedColor);
    invalidate(seekBounds);
  }
  /**
   * Sets the color for unplayed ad markers.
   *
   * @param adMarkerColor The color for unplayed ad markers.
   */
  public void setAdMarkerColor(@ColorInt int adMarkerColor) {
    adMarkerPaint.setColor(adMarkerColor);
    invalidate(seekBounds);
  }
  /**
   * Sets the color for played ad markers.
   *
   * @param playedAdMarkerColor The color for played ad markers.
   */
  public void setPlayedAdMarkerColor(@ColorInt int playedAdMarkerColor) {
    playedAdMarkerPaint.setColor(playedAdMarkerColor);
    invalidate(seekBounds);
  }
  // TimeBar implementation.
  /** Registers a scrub listener. Must not be null. */
  @Override
  public void addListener(OnScrubListener listener) {
    Assertions.checkNotNull(listener);
    listeners.add(listener);
  }
  /** Unregisters a previously added scrub listener (no-op if not registered). */
  @Override
  public void removeListener(OnScrubListener listener) {
    listeners.remove(listener);
  }
  /**
   * Sets a fixed time increment for key-based scrubbing. Switching to a time increment disables
   * the count-based increment.
   */
  @Override
  public void setKeyTimeIncrement(long time) {
    Assertions.checkArgument(time > 0);
    keyCountIncrement = C.INDEX_UNSET;
    keyTimeIncrement = time;
  }
  /**
   * Sets a count-based increment (duration / count) for key-based scrubbing. Switching to a count
   * increment disables the fixed time increment.
   */
  @Override
  public void setKeyCountIncrement(int count) {
    Assertions.checkArgument(count > 0);
    keyCountIncrement = count;
    keyTimeIncrement = C.TIME_UNSET;
  }
@Override
public void setPosition(long position) {
if (this.position == position) {
return;
}
this.position = position;
setContentDescription(getProgressText());
update();
}
@Override
public void setBufferedPosition(long bufferedPosition) {
if (this.bufferedPosition == bufferedPosition) {
return;
}
this.bufferedPosition = bufferedPosition;
update();
}
@Override
public void setDuration(long duration) {
if (this.duration == duration) {
return;
}
this.duration = duration;
if (scrubbing && duration == C.TIME_UNSET) {
stopScrubbing(/* canceled= */ true);
}
update();
}
@Override
public long getPreferredUpdateDelay() {
int timeBarWidthDp = pxToDp(density, progressBar.width());
return timeBarWidthDp == 0 || duration == 0 || duration == C.TIME_UNSET
? Long.MAX_VALUE
: duration / timeBarWidthDp;
}
  /**
   * Sets the ad marker data. The arrays may be null only when {@code adGroupCount} is zero; this
   * invariant is relied upon by {@code drawTimeBar}.
   */
  @Override
  public void setAdGroupTimesMs(
      @Nullable long[] adGroupTimesMs, @Nullable boolean[] playedAdGroups, int adGroupCount) {
    Assertions.checkArgument(
        adGroupCount == 0 || (adGroupTimesMs != null && playedAdGroups != null));
    this.adGroupCount = adGroupCount;
    this.adGroupTimesMs = adGroupTimesMs;
    this.playedAdGroups = playedAdGroups;
    update();
  }
  // View methods.
  /** Disabling the view while the user is scrubbing cancels the scrub operation. */
  @Override
  public void setEnabled(boolean enabled) {
    super.setEnabled(enabled);
    if (scrubbing && !enabled) {
      stopScrubbing(/* canceled= */ true);
    }
  }
  /** Draws the bar first, then the playhead on top of it. */
  @Override
  public void onDraw(Canvas canvas) {
    canvas.save();
    drawTimeBar(canvas);
    drawPlayhead(canvas);
    canvas.restore();
  }
  /**
   * Handles touch-based scrubbing: DOWN inside the seek bounds starts a scrub, MOVE updates it
   * (with fine-scrub when the pointer is dragged far above the view), UP/CANCEL finishes it.
   */
  @Override
  public boolean onTouchEvent(MotionEvent event) {
    if (!isEnabled() || duration <= 0) {
      return false;
    }
    Point touchPosition = resolveRelativeTouchPosition(event);
    int x = touchPosition.x;
    int y = touchPosition.y;
    switch (event.getAction()) {
      case MotionEvent.ACTION_DOWN:
        if (isInSeekBar(x, y)) {
          positionScrubber(x);
          startScrubbing(getScrubberPosition());
          update();
          invalidate();
          return true;
        }
        break;
      case MotionEvent.ACTION_MOVE:
        if (scrubbing) {
          // fineScrubYThreshold is negative, so this branch is taken when the pointer has been
          // dragged above the top of the view: horizontal movement is divided by
          // FINE_SCRUB_RATIO for finer control, relative to the last coarse position.
          if (y < fineScrubYThreshold) {
            int relativeX = x - lastCoarseScrubXPosition;
            positionScrubber(lastCoarseScrubXPosition + relativeX / FINE_SCRUB_RATIO);
          } else {
            lastCoarseScrubXPosition = x;
            positionScrubber(x);
          }
          updateScrubbing(getScrubberPosition());
          update();
          invalidate();
          return true;
        }
        break;
      case MotionEvent.ACTION_UP:
      case MotionEvent.ACTION_CANCEL:
        if (scrubbing) {
          // CANCEL reports the scrub as canceled to listeners; UP commits it.
          stopScrubbing(/* canceled= */ event.getAction() == MotionEvent.ACTION_CANCEL);
          return true;
        }
        break;
      default:
        // Do nothing.
    }
    return false;
  }
  /**
   * Handles key-based scrubbing. DPAD left/right nudge the position by {@code
   * getPositionIncrement()}; the scrub is auto-committed after {@code STOP_SCRUBBING_TIMEOUT_MS}
   * of inactivity. CENTER/ENTER commit an in-progress scrub immediately.
   */
  @Override
  public boolean onKeyDown(int keyCode, KeyEvent event) {
    if (isEnabled()) {
      long positionIncrement = getPositionIncrement();
      switch (keyCode) {
        case KeyEvent.KEYCODE_DPAD_LEFT:
          // Left negates the increment, then deliberately falls through to the shared logic.
          positionIncrement = -positionIncrement;
          // Fall through.
        case KeyEvent.KEYCODE_DPAD_RIGHT:
          if (scrubIncrementally(positionIncrement)) {
            removeCallbacks(stopScrubbingRunnable);
            postDelayed(stopScrubbingRunnable, STOP_SCRUBBING_TIMEOUT_MS);
            return true;
          }
          break;
        case KeyEvent.KEYCODE_DPAD_CENTER:
        case KeyEvent.KEYCODE_ENTER:
          if (scrubbing) {
            stopScrubbing(/* canceled= */ false);
            return true;
          }
          break;
        default:
          // Do nothing.
      }
    }
    return super.onKeyDown(keyCode, event);
  }
  /** Losing focus while scrubbing commits (does not cancel) the scrub. */
  @Override
  protected void onFocusChanged(
      boolean gainFocus, int direction, @Nullable Rect previouslyFocusedRect) {
    super.onFocusChanged(gainFocus, direction, previouslyFocusedRect);
    if (scrubbing && !gainFocus) {
      stopScrubbing(/* canceled= */ false);
    }
  }
  /** Propagates view state changes (pressed/enabled/...) to the scrubber drawable. */
  @Override
  protected void drawableStateChanged() {
    super.drawableStateChanged();
    updateDrawableState();
  }
  @Override
  public void jumpDrawablesToCurrentState() {
    super.jumpDrawablesToCurrentState();
    if (scrubberDrawable != null) {
      scrubberDrawable.jumpToCurrentState();
    }
  }
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
int heightMode = MeasureSpec.getMode(heightMeasureSpec);
int heightSize = MeasureSpec.getSize(heightMeasureSpec);
int height =
heightMode == MeasureSpec.UNSPECIFIED
? touchTargetHeight
: heightMode == MeasureSpec.EXACTLY
? heightSize
: Math.min(touchTargetHeight, heightSize);
setMeasuredDimension(MeasureSpec.getSize(widthMeasureSpec), height);
updateDrawableState();
}
  /**
   * Computes the seek-bounds (touch target) and progress-bar rectangles from the view size,
   * padding, bar gravity and scrubber padding, then registers a system-gesture exclusion rect on
   * API 29+ so edge gestures don't steal horizontal drags.
   */
  @Override
  protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
    int width = right - left;
    int height = bottom - top;
    int seekLeft = getPaddingLeft();
    int seekRight = width - getPaddingRight();
    int seekBoundsY;
    int progressBarY;
    // When the scrubber is hidden with padding disabled, the bar uses the full width.
    int scrubberPadding = scrubberPaddingDisabled ? 0 : this.scrubberPadding;
    if (barGravity == BAR_GRAVITY_BOTTOM) {
      seekBoundsY = height - getPaddingBottom() - touchTargetHeight;
      // Lift the bar so the scrubber still fits below it when it overhangs the bar.
      progressBarY =
          height - getPaddingBottom() - barHeight - Math.max(scrubberPadding - (barHeight / 2), 0);
    } else {
      // BAR_GRAVITY_CENTER: center both rects vertically.
      seekBoundsY = (height - touchTargetHeight) / 2;
      progressBarY = (height - barHeight) / 2;
    }
    seekBounds.set(seekLeft, seekBoundsY, seekRight, seekBoundsY + touchTargetHeight);
    progressBar.set(
        seekBounds.left + scrubberPadding,
        progressBarY,
        seekBounds.right - scrubberPadding,
        progressBarY + barHeight);
    if (Util.SDK_INT >= 29) {
      setSystemGestureExclusionRectsV29(width, height);
    }
    update();
  }
  /** Mirrors the scrubber drawable when the layout direction changes. */
  @Override
  public void onRtlPropertiesChanged(int layoutDirection) {
    if (scrubberDrawable != null && setDrawableLayoutDirection(scrubberDrawable, layoutDirection)) {
      invalidate();
    }
  }
  /** Reports this view as a SeekBar to accessibility services, announcing the current time. */
  @Override
  public void onInitializeAccessibilityEvent(AccessibilityEvent event) {
    super.onInitializeAccessibilityEvent(event);
    if (event.getEventType() == AccessibilityEvent.TYPE_VIEW_SELECTED) {
      event.getText().add(getProgressText());
    }
    event.setClassName(ACCESSIBILITY_CLASS_NAME);
  }
  /** Exposes scroll forward/backward actions when a duration is known. */
  @Override
  public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) {
    super.onInitializeAccessibilityNodeInfo(info);
    info.setClassName(ACCESSIBILITY_CLASS_NAME);
    info.setContentDescription(getProgressText());
    if (duration <= 0) {
      // Nothing to scroll through without a duration.
      return;
    }
    if (Util.SDK_INT >= 21) {
      info.addAction(AccessibilityAction.ACTION_SCROLL_FORWARD);
      info.addAction(AccessibilityAction.ACTION_SCROLL_BACKWARD);
    } else {
      info.addAction(AccessibilityNodeInfo.ACTION_SCROLL_FORWARD);
      info.addAction(AccessibilityNodeInfo.ACTION_SCROLL_BACKWARD);
    }
  }
@Override
public boolean performAccessibilityAction(int action, @Nullable Bundle args) {
if (super.performAccessibilityAction(action, args)) {
return true;
}
if (duration <= 0) {
return false;
}
if (action == AccessibilityNodeInfo.ACTION_SCROLL_BACKWARD) {
if (scrubIncrementally(-getPositionIncrement())) {
stopScrubbing(/* canceled= */ false);
}
} else if (action == AccessibilityNodeInfo.ACTION_SCROLL_FORWARD) {
if (scrubIncrementally(getPositionIncrement())) {
stopScrubbing(/* canceled= */ false);
}
} else {
return false;
}
sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_SELECTED);
return true;
}
  // Internal methods.
  /**
   * Enters scrubbing mode at the given position: marks the view pressed, asks ancestors not to
   * intercept further touch events, and notifies listeners.
   */
  private void startScrubbing(long scrubPosition) {
    this.scrubPosition = scrubPosition;
    scrubbing = true;
    setPressed(true);
    ViewParent parent = getParent();
    if (parent != null) {
      // Prevent scrollable ancestors from hijacking the drag.
      parent.requestDisallowInterceptTouchEvent(true);
    }
    for (OnScrubListener listener : listeners) {
      listener.onScrubStart(this, scrubPosition);
    }
  }
private void updateScrubbing(long scrubPosition) {
if (this.scrubPosition == scrubPosition) {
return;
}
this.scrubPosition = scrubPosition;
for (OnScrubListener listener : listeners) {
listener.onScrubMove(this, scrubPosition);
}
}
  /**
   * Leaves scrubbing mode, restoring parent touch interception and notifying listeners.
   *
   * @param canceled Whether the scrub was canceled (reported to listeners) rather than committed.
   */
  private void stopScrubbing(boolean canceled) {
    // Drop any pending key-scrub auto-commit.
    removeCallbacks(stopScrubbingRunnable);
    scrubbing = false;
    setPressed(false);
    ViewParent parent = getParent();
    if (parent != null) {
      parent.requestDisallowInterceptTouchEvent(false);
    }
    invalidate();
    for (OnScrubListener listener : listeners) {
      listener.onScrubStop(this, scrubPosition, canceled);
    }
  }
/**
* Incrementally scrubs the position by {@code positionChange}.
*
* @param positionChange The change in the scrubber position, in milliseconds. May be negative.
* @return Returns whether the scrubber position changed.
*/
private boolean scrubIncrementally(long positionChange) {
if (duration <= 0) {
return false;
}
long previousPosition = scrubbing ? scrubPosition : position;
long scrubPosition = Util.constrainValue(previousPosition + positionChange, 0, duration);
if (scrubPosition == previousPosition) {
return false;
}
if (!scrubbing) {
startScrubbing(scrubPosition);
} else {
updateScrubbing(scrubPosition);
}
update();
return true;
}
  /**
   * Recomputes the buffered and played (scrubber) bar rectangles from the current times and
   * invalidates the seek bounds. Multiplications promote to long (positions are long), so the
   * pixel math cannot overflow int ranges.
   */
  private void update() {
    bufferedBar.set(progressBar);
    scrubberBar.set(progressBar);
    // While scrubbing, the played bar tracks the scrub position rather than playback.
    long newScrubberTime = scrubbing ? scrubPosition : position;
    if (duration > 0) {
      int bufferedPixelWidth = (int) ((progressBar.width() * bufferedPosition) / duration);
      bufferedBar.right = Math.min(progressBar.left + bufferedPixelWidth, progressBar.right);
      int scrubberPixelPosition = (int) ((progressBar.width() * newScrubberTime) / duration);
      scrubberBar.right = Math.min(progressBar.left + scrubberPixelPosition, progressBar.right);
    } else {
      // No duration: collapse both bars to zero width.
      bufferedBar.right = progressBar.left;
      scrubberBar.right = progressBar.left;
    }
    invalidate(seekBounds);
  }
  /** Moves the scrubber to the given x, clamped to the progress bar's horizontal extent. */
  private void positionScrubber(float xPosition) {
    scrubberBar.right = Util.constrainValue((int) xPosition, progressBar.left, progressBar.right);
  }
  /**
   * Returns the event position relative to this view. Reuses the {@code touchPosition} field to
   * avoid allocating per event; callers must not retain the returned point.
   */
  private Point resolveRelativeTouchPosition(MotionEvent motionEvent) {
    touchPosition.set((int) motionEvent.getX(), (int) motionEvent.getY());
    return touchPosition;
  }
  /** Converts the scrubber bar width back into a media time, or 0 if that is not possible. */
  private long getScrubberPosition() {
    if (progressBar.width() <= 0 || duration == C.TIME_UNSET) {
      return 0;
    }
    return (scrubberBar.width() * duration) / progressBar.width();
  }
  /** Whether the (view-relative) coordinates fall inside the touch target. */
  private boolean isInSeekBar(float x, float y) {
    return seekBounds.contains((int) x, (int) y);
  }
  /**
   * Draws the bar: unplayed region, then buffered, then played, then ad markers on top. Regions
   * are computed so they never overlap.
   */
  private void drawTimeBar(Canvas canvas) {
    int progressBarHeight = progressBar.height();
    int barTop = progressBar.centerY() - progressBarHeight / 2;
    int barBottom = barTop + progressBarHeight;
    if (duration <= 0) {
      // No duration: the whole bar is drawn as unplayed.
      canvas.drawRect(progressBar.left, barTop, progressBar.right, barBottom, unplayedPaint);
      return;
    }
    int bufferedLeft = bufferedBar.left;
    int bufferedRight = bufferedBar.right;
    // Unplayed region starts after both the buffered and the played (scrubber) regions.
    int progressLeft = Math.max(Math.max(progressBar.left, bufferedRight), scrubberBar.right);
    if (progressLeft < progressBar.right) {
      canvas.drawRect(progressLeft, barTop, progressBar.right, barBottom, unplayedPaint);
    }
    // Buffered region is only drawn beyond the played region.
    bufferedLeft = Math.max(bufferedLeft, scrubberBar.right);
    if (bufferedRight > bufferedLeft) {
      canvas.drawRect(bufferedLeft, barTop, bufferedRight, barBottom, bufferedPaint);
    }
    if (scrubberBar.width() > 0) {
      canvas.drawRect(scrubberBar.left, barTop, scrubberBar.right, barBottom, playedPaint);
    }
    if (adGroupCount == 0) {
      return;
    }
    // Arrays are non-null whenever adGroupCount > 0 (enforced in setAdGroupTimesMs).
    long[] adGroupTimesMs = Assertions.checkNotNull(this.adGroupTimesMs);
    boolean[] playedAdGroups = Assertions.checkNotNull(this.playedAdGroups);
    int adMarkerOffset = adMarkerWidth / 2;
    for (int i = 0; i < adGroupCount; i++) {
      long adGroupTimeMs = Util.constrainValue(adGroupTimesMs[i], 0, duration);
      int markerPositionOffset =
          (int) (progressBar.width() * adGroupTimeMs / duration) - adMarkerOffset;
      // Clamp so the marker is drawn fully inside the bar.
      int markerLeft =
          progressBar.left
              + Math.min(progressBar.width() - adMarkerWidth, Math.max(0, markerPositionOffset));
      Paint paint = playedAdGroups[i] ? playedAdMarkerPaint : adMarkerPaint;
      canvas.drawRect(markerLeft, barTop, markerLeft + adMarkerWidth, barBottom, paint);
    }
  }
  /**
   * Draws the scrubber handle at the current position: either a circle whose radius depends on
   * interaction state, or the custom drawable, both scaled by {@code scrubberScale}.
   */
  private void drawPlayhead(Canvas canvas) {
    if (duration <= 0) {
      return;
    }
    int playheadX = Util.constrainValue(scrubberBar.right, scrubberBar.left, progressBar.right);
    int playheadY = scrubberBar.centerY();
    if (scrubberDrawable == null) {
      // Circle scrubber: size reflects dragging/focused > enabled > disabled.
      int scrubberSize =
          (scrubbing || isFocused())
              ? scrubberDraggedSize
              : (isEnabled() ? scrubberEnabledSize : scrubberDisabledSize);
      int playheadRadius = (int) ((scrubberSize * scrubberScale) / 2);
      canvas.drawCircle(playheadX, playheadY, playheadRadius, scrubberPaint);
    } else {
      // Custom drawable, centered on the playhead and scaled by scrubberScale.
      int scrubberDrawableWidth = (int) (scrubberDrawable.getIntrinsicWidth() * scrubberScale);
      int scrubberDrawableHeight = (int) (scrubberDrawable.getIntrinsicHeight() * scrubberScale);
      scrubberDrawable.setBounds(
          playheadX - scrubberDrawableWidth / 2,
          playheadY - scrubberDrawableHeight / 2,
          playheadX + scrubberDrawableWidth / 2,
          playheadY + scrubberDrawableHeight / 2);
      scrubberDrawable.draw(canvas);
    }
  }
  /** Pushes the view's drawable state into the scrubber drawable, redrawing if it changed. */
  private void updateDrawableState() {
    if (scrubberDrawable != null
        && scrubberDrawable.isStateful()
        && scrubberDrawable.setState(getDrawableState())) {
      invalidate();
    }
  }
  /**
   * Excludes the whole view from system edge gestures (API 29+) so horizontal scrub drags near
   * screen edges are delivered to the view.
   */
  @RequiresApi(29)
  private void setSystemGestureExclusionRectsV29(int width, int height) {
    if (lastExclusionRectangle != null
        && lastExclusionRectangle.width() == width
        && lastExclusionRectangle.height() == height) {
      // Allocating inside onLayout is considered a DrawAllocation lint error, so avoid if possible.
      return;
    }
    lastExclusionRectangle = new Rect(/* left= */ 0, /* top= */ 0, width, height);
    setSystemGestureExclusionRects(Collections.singletonList(lastExclusionRectangle));
  }
  /** Formats the current position as a time string, reusing the shared builder/formatter. */
  private String getProgressText() {
    return Util.getStringForTime(formatBuilder, formatter, position);
  }
private long getPositionIncrement() {
return keyTimeIncrement == C.TIME_UNSET
? (duration == C.TIME_UNSET ? 0 : (duration / keyCountIncrement))
: keyTimeIncrement;
}
  /** Applies this view's layout direction to the drawable. Returns whether it changed. */
  private boolean setDrawableLayoutDirection(Drawable drawable) {
    return Util.SDK_INT >= 23 && setDrawableLayoutDirection(drawable, getLayoutDirection());
  }
  /** Applies a layout direction to the drawable; Drawable.setLayoutDirection requires API 23. */
  private static boolean setDrawableLayoutDirection(Drawable drawable, int layoutDirection) {
    return Util.SDK_INT >= 23 && drawable.setLayoutDirection(layoutDirection);
  }
private static int dpToPx(float density, int dps) {
return (int) (dps * density + 0.5f);
}
private static int pxToDp(float density, int px) {
return (int) (px / density);
}
}
|
apache/flink | 36,370 | flink-runtime/src/test/java/org/apache/flink/runtime/blob/BlobServerPutTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.blob;
import org.apache.flink.api.common.JobID;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.Path;
import org.apache.flink.core.testutils.CheckedThread;
import org.apache.flink.testutils.junit.utils.TempDirUtils;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.OperatingSystem;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.concurrent.FutureUtils;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import javax.annotation.Nullable;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.nio.file.AccessDeniedException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import static org.apache.flink.runtime.blob.BlobClientTest.validateGetAndClose;
import static org.apache.flink.runtime.blob.BlobKey.BlobType.PERMANENT_BLOB;
import static org.apache.flink.runtime.blob.BlobKey.BlobType.TRANSIENT_BLOB;
import static org.apache.flink.runtime.blob.BlobKeyTest.verifyKeyDifferentHashEquals;
import static org.apache.flink.runtime.blob.BlobServerGetTest.get;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assumptions.assumeThat;
/**
* Tests for successful and failing PUT operations against the BLOB server, and successful GET
* operations.
*/
class BlobServerPutTest {
    // JUnit-managed temporary directory backing the BlobServer storage for each test.
    @TempDir private java.nio.file.Path tempDir;
    // Source of random BLOB payloads used by the PUT tests.
    private final Random rnd = new Random();
    // --- concurrency tests for utility methods which could fail during the put operation ---
    /** Checked thread that calls {@link BlobServer#getStorageLocation(JobID, BlobKey)}. */
    public static class ContentAddressableGetStorageLocation extends CheckedThread {
        private final BlobServer server;
        // May be null to address job-independent (shared) storage.
        private final JobID jobId;
        private final BlobKey key;
        ContentAddressableGetStorageLocation(
                BlobServer server, @Nullable JobID jobId, BlobKey key) {
            this.server = server;
            this.jobId = jobId;
            this.key = key;
        }
        @Override
        public void go() throws Exception {
            // Invoked concurrently by the tests below; any thrown exception is
            // captured by CheckedThread and rethrown from sync().
            server.getStorageLocation(jobId, key);
        }
    }
    /** Tests concurrent calls to {@link BlobServer#getStorageLocation(JobID, BlobKey)}. */
    @Test
    void testServerContentAddressableGetStorageLocationConcurrentNoJob() throws Exception {
        // null job id exercises the job-independent storage path.
        testServerContentAddressableGetStorageLocationConcurrent(null);
    }
    /** Tests concurrent calls to {@link BlobServer#getStorageLocation(JobID, BlobKey)}. */
    @Test
    void testServerContentAddressableGetStorageLocationConcurrentForJob() throws Exception {
        testServerContentAddressableGetStorageLocationConcurrent(new JobID());
    }
    /**
     * Runs three threads per key (one transient, one permanent) against {@link
     * BlobServer#getStorageLocation(JobID, BlobKey)} and fails if any thread throws.
     *
     * @param jobId job id to resolve locations for, or null for job-independent storage
     */
    private void testServerContentAddressableGetStorageLocationConcurrent(
            @Nullable final JobID jobId) throws Exception {
        try (BlobServer server = TestingBlobUtils.createServer(tempDir)) {
            server.start();
            BlobKey key1 = new TransientBlobKey();
            BlobKey key2 = new PermanentBlobKey();
            // Multiple threads per key provoke races on directory/location resolution.
            CheckedThread[] threads =
                    new CheckedThread[] {
                        new ContentAddressableGetStorageLocation(server, jobId, key1),
                        new ContentAddressableGetStorageLocation(server, jobId, key1),
                        new ContentAddressableGetStorageLocation(server, jobId, key1),
                        new ContentAddressableGetStorageLocation(server, jobId, key2),
                        new ContentAddressableGetStorageLocation(server, jobId, key2),
                        new ContentAddressableGetStorageLocation(server, jobId, key2)
                    };
            checkedThreadSimpleTest(threads);
        }
    }
/**
* Helper method to first start all threads and then wait for their completion.
*
* @param threads threads to use
* @throws Exception exceptions that are thrown from the threads
*/
private void checkedThreadSimpleTest(CheckedThread[] threads) throws Exception {
// start all threads
for (CheckedThread t : threads) {
t.start();
}
// wait for thread completion and check exceptions
for (CheckedThread t : threads) {
t.sync();
}
}
    // --------------------------------------------------------------------------------------------
    // Transient BLOB, no job ids on either put or get.
    @Test
    void testPutBufferSuccessfulGet1() throws IOException {
        testPutBufferSuccessfulGet(null, null, TRANSIENT_BLOB);
    }
    // Transient BLOB, job id only on the second upload.
    @Test
    void testPutBufferSuccessfulGet2() throws IOException {
        testPutBufferSuccessfulGet(null, new JobID(), TRANSIENT_BLOB);
    }
    // Transient BLOB, distinct job ids for the two uploads.
    @Test
    void testPutBufferSuccessfulGet3() throws IOException {
        testPutBufferSuccessfulGet(new JobID(), new JobID(), TRANSIENT_BLOB);
    }
    // Transient BLOB, job id only on the first upload.
    @Test
    void testPutBufferSuccessfulGet4() throws IOException {
        testPutBufferSuccessfulGet(new JobID(), null, TRANSIENT_BLOB);
    }
    // Permanent (HA) BLOB; permanent BLOBs always require job ids.
    @Test
    void testPutBufferSuccessfulGetHa() throws IOException {
        testPutBufferSuccessfulGet(new JobID(), new JobID(), PERMANENT_BLOB);
    }
    /**
     * Uploads two byte arrays for different jobs into the server via the {@link BlobServer}. File
     * transfers should be successful.
     *
     * @param jobId1 first job id
     * @param jobId2 second job id
     * @param blobType whether the BLOB should become permanent or transient
     */
    private void testPutBufferSuccessfulGet(
            @Nullable JobID jobId1, @Nullable JobID jobId2, BlobKey.BlobType blobType)
            throws IOException {
        try (BlobServer server = TestingBlobUtils.createServer(tempDir)) {
            server.start();
            // 2 MB of random data, plus a small slice of it as a second distinct payload.
            byte[] data = new byte[2000000];
            rnd.nextBytes(data);
            byte[] data2 = Arrays.copyOfRange(data, 10, 54);
            // put data for jobId1 and verify
            BlobKey key1a = put(server, jobId1, data, blobType);
            assertThat(key1a).isNotNull();
            // second upload of same data should yield a different BlobKey
            BlobKey key1a2 = put(server, jobId1, data, blobType);
            assertThat(key1a2).isNotNull();
            // Identical content: same hash component, but the keys themselves differ.
            verifyKeyDifferentHashEquals(key1a, key1a2);
            BlobKey key1b = put(server, jobId1, data2, blobType);
            assertThat(key1b).isNotNull();
            verifyContents(server, jobId1, key1a, data);
            verifyContents(server, jobId1, key1a2, data);
            verifyContents(server, jobId1, key1b, data2);
            // now put data for jobId2 and verify that both are ok
            BlobKey key2a = put(server, jobId2, data, blobType);
            assertThat(key2a).isNotNull();
            verifyKeyDifferentHashEquals(key1a, key2a);
            BlobKey key2b = put(server, jobId2, data2, blobType);
            assertThat(key2b).isNotNull();
            verifyKeyDifferentHashEquals(key1b, key2b);
            // verify the accessibility and the BLOB contents
            verifyContents(server, jobId2, key2a, data);
            verifyContents(server, jobId2, key2b, data2);
            // verify the accessibility and the BLOB contents one more time (transient BLOBs should
            // not be deleted here)
            verifyContents(server, jobId1, key1a, data);
            verifyContents(server, jobId1, key1a2, data);
            verifyContents(server, jobId1, key1b, data2);
            verifyContents(server, jobId2, key2a, data);
            verifyContents(server, jobId2, key2b, data2);
        }
    }
// --------------------------------------------------------------------------------------------
@Test
void testPutStreamSuccessfulGet1() throws IOException {
testPutStreamSuccessfulGet(null, null, TRANSIENT_BLOB);
}
@Test
void testPutStreamSuccessfulGet2() throws IOException {
testPutStreamSuccessfulGet(null, new JobID(), TRANSIENT_BLOB);
}
@Test
void testPutStreamSuccessfulGet3() throws IOException {
testPutStreamSuccessfulGet(new JobID(), new JobID(), TRANSIENT_BLOB);
}
@Test
void testPutStreamSuccessfulGet4() throws IOException {
testPutStreamSuccessfulGet(new JobID(), null, TRANSIENT_BLOB);
}
@Test
void testPutStreamSuccessfulGetHa() throws IOException {
testPutStreamSuccessfulGet(new JobID(), new JobID(), PERMANENT_BLOB);
}
    /**
     * Uploads two file streams for different jobs into the server via the {@link BlobServer}. File
     * transfers should be successful.
     *
     * <p>Note: the order of operations is significant. Re-uploading identical bytes must yield a
     * different {@link BlobKey}, and every blob is read back twice because the second read
     * verifies that a successful GET does not delete transient BLOBs.
     *
     * @param jobId1 first job id (or {@code null} if job-unrelated)
     * @param jobId2 second job id (or {@code null} if job-unrelated)
     * @param blobType whether the BLOB should become permanent or transient
     */
    private void testPutStreamSuccessfulGet(
            @Nullable JobID jobId1, @Nullable JobID jobId2, BlobKey.BlobType blobType)
            throws IOException {
        try (BlobServer server = TestingBlobUtils.createServer(tempDir)) {
            server.start();
            byte[] data = new byte[2000000];
            rnd.nextBytes(data);
            // data2 is a small slice of data, i.e. different contents than data
            byte[] data2 = Arrays.copyOfRange(data, 10, 54);
            // put data for jobId1 and verify
            BlobKey key1a = put(server, jobId1, new ByteArrayInputStream(data), blobType);
            assertThat(key1a).isNotNull();
            // second upload of same data should yield a different BlobKey
            BlobKey key1a2 = put(server, jobId1, new ByteArrayInputStream(data), blobType);
            assertThat(key1a2).isNotNull();
            verifyKeyDifferentHashEquals(key1a, key1a2);
            BlobKey key1b = put(server, jobId1, new ByteArrayInputStream(data2), blobType);
            assertThat(key1b).isNotNull();
            verifyContents(server, jobId1, key1a, data);
            verifyContents(server, jobId1, key1a2, data);
            verifyContents(server, jobId1, key1b, data2);
            // now put data for jobId2 and verify that both are ok
            BlobKey key2a = put(server, jobId2, new ByteArrayInputStream(data), blobType);
            assertThat(key2a).isNotNull();
            verifyKeyDifferentHashEquals(key1a, key2a);
            BlobKey key2b = put(server, jobId2, new ByteArrayInputStream(data2), blobType);
            assertThat(key2b).isNotNull();
            verifyKeyDifferentHashEquals(key1b, key2b);
            // verify the accessibility and the BLOB contents
            verifyContents(server, jobId2, key2a, data);
            verifyContents(server, jobId2, key2b, data2);
            // verify the accessibility and the BLOB contents one more time (transient BLOBs should
            // not be deleted here)
            verifyContents(server, jobId1, key1a, data);
            verifyContents(server, jobId1, key1a2, data);
            verifyContents(server, jobId1, key1b, data2);
            verifyContents(server, jobId2, key2a, data);
            verifyContents(server, jobId2, key2b, data2);
        }
    }
// --------------------------------------------------------------------------------------------
@Test
void testPutChunkedStreamSuccessfulGet1() throws IOException {
testPutChunkedStreamSuccessfulGet(null, null, TRANSIENT_BLOB);
}
@Test
void testPutChunkedStreamSuccessfulGet2() throws IOException {
testPutChunkedStreamSuccessfulGet(null, new JobID(), TRANSIENT_BLOB);
}
@Test
void testPutChunkedStreamSuccessfulGet3() throws IOException {
testPutChunkedStreamSuccessfulGet(new JobID(), new JobID(), TRANSIENT_BLOB);
}
@Test
void testPutChunkedStreamSuccessfulGet4() throws IOException {
testPutChunkedStreamSuccessfulGet(new JobID(), null, TRANSIENT_BLOB);
}
@Test
void testPutChunkedStreamSuccessfulGetHa() throws IOException {
testPutChunkedStreamSuccessfulGet(new JobID(), new JobID(), PERMANENT_BLOB);
}
    /**
     * Uploads two chunked file streams for different jobs into the server via the {@link
     * BlobServer}. File transfers should be successful.
     *
     * <p>The {@link ChunkedInputStream} serves the data in 19-byte chunks, exercising uploads
     * where a single {@code read} returns fewer bytes than requested. Note: the order of
     * operations is significant — every blob is read back twice because the second read verifies
     * that a successful GET does not delete transient BLOBs.
     *
     * @param jobId1 first job id (or {@code null} if job-unrelated)
     * @param jobId2 second job id (or {@code null} if job-unrelated)
     * @param blobType whether the BLOB should become permanent or transient
     */
    private void testPutChunkedStreamSuccessfulGet(
            @Nullable JobID jobId1, @Nullable JobID jobId2, BlobKey.BlobType blobType)
            throws IOException {
        try (BlobServer server = TestingBlobUtils.createServer(tempDir)) {
            server.start();
            byte[] data = new byte[2000000];
            rnd.nextBytes(data);
            // data2 is a small slice of data, i.e. different contents than data
            byte[] data2 = Arrays.copyOfRange(data, 10, 54);
            // put data for jobId1 and verify
            BlobKey key1a = put(server, jobId1, new ChunkedInputStream(data, 19), blobType);
            assertThat(key1a).isNotNull();
            // second upload of same data should yield a different BlobKey
            BlobKey key1a2 = put(server, jobId1, new ChunkedInputStream(data, 19), blobType);
            assertThat(key1a2).isNotNull();
            verifyKeyDifferentHashEquals(key1a, key1a2);
            BlobKey key1b = put(server, jobId1, new ChunkedInputStream(data2, 19), blobType);
            assertThat(key1b).isNotNull();
            verifyContents(server, jobId1, key1a, data);
            verifyContents(server, jobId1, key1a2, data);
            verifyContents(server, jobId1, key1b, data2);
            // now put data for jobId2 and verify that both are ok
            BlobKey key2a = put(server, jobId2, new ChunkedInputStream(data, 19), blobType);
            assertThat(key2a).isNotNull();
            verifyKeyDifferentHashEquals(key1a, key2a);
            BlobKey key2b = put(server, jobId2, new ChunkedInputStream(data2, 19), blobType);
            assertThat(key2b).isNotNull();
            verifyKeyDifferentHashEquals(key1b, key2b);
            // verify the accessibility and the BLOB contents
            verifyContents(server, jobId2, key2a, data);
            verifyContents(server, jobId2, key2b, data2);
            // verify the accessibility and the BLOB contents one more time (transient BLOBs should
            // not be deleted here)
            verifyContents(server, jobId1, key1a, data);
            verifyContents(server, jobId1, key1a2, data);
            verifyContents(server, jobId1, key1b, data2);
            verifyContents(server, jobId2, key2a, data);
            verifyContents(server, jobId2, key2b, data2);
        }
    }
// --------------------------------------------------------------------------------------------
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsNoJob() throws IOException {
testPutBufferFails(null, TRANSIENT_BLOB);
}
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsForJob() throws IOException {
testPutBufferFails(new JobID(), TRANSIENT_BLOB);
}
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsForJobHa() throws IOException {
testPutBufferFails(new JobID(), PERMANENT_BLOB);
}
/**
* Uploads a byte array to a server which cannot create any files via the {@link BlobServer}.
* File transfers should fail.
*
* @param jobId job id
* @param blobType whether the BLOB should become permanent or transient
*/
private void testPutBufferFails(@Nullable final JobID jobId, BlobKey.BlobType blobType)
throws IOException {
assumeThat(OperatingSystem.isWindows()).as("setWritable doesn't work on Windows").isFalse();
try (BlobServer server = TestingBlobUtils.createServer(tempDir)) {
server.start();
// make sure the blob server cannot create any files in its storage dir
File tempFileDir = server.createTemporaryFilename().getParentFile().getParentFile();
assertThat(tempFileDir.setExecutable(true, false)).isTrue();
assertThat(tempFileDir.setReadable(true, false)).isTrue();
assertThat(tempFileDir.setWritable(false, false)).isTrue();
byte[] data = new byte[2000000];
rnd.nextBytes(data);
// upload the file to the server directly
assertThatThrownBy(() -> put(server, jobId, data, blobType))
.isInstanceOf(AccessDeniedException.class);
// set writable again to make sure we can remove the directory
assertThat(tempFileDir.setWritable(true, false)).isTrue();
}
}
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsIncomingNoJob() throws IOException {
testPutBufferFailsIncoming(null, TRANSIENT_BLOB);
}
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsIncomingForJob() throws IOException {
testPutBufferFailsIncoming(new JobID(), TRANSIENT_BLOB);
}
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsIncomingForJobHa() throws IOException {
testPutBufferFailsIncoming(new JobID(), PERMANENT_BLOB);
}
    /**
     * Uploads a byte array to a server which cannot create incoming files via the {@link
     * BlobServer}. File transfers should fail.
     *
     * <p>The "incoming" directory, where uploads are staged before being moved to their final
     * location, is made read-only. The upload must then fail with a permission error and must not
     * create a job directory.
     *
     * @param jobId job id (or {@code null} if job-unrelated)
     * @param blobType whether the BLOB should become permanent or transient
     */
    private void testPutBufferFailsIncoming(@Nullable final JobID jobId, BlobKey.BlobType blobType)
            throws IOException {
        assumeThat(OperatingSystem.isWindows()).as("setWritable doesn't work on Windows").isFalse();
        File tempFileDir = null;
        try (BlobServer server = TestingBlobUtils.createServer(tempDir)) {
            server.start();
            // make sure the blob server cannot create any files in its storage dir
            tempFileDir = server.createTemporaryFilename().getParentFile();
            assertThat(tempFileDir.setExecutable(true, false)).isTrue();
            assertThat(tempFileDir.setReadable(true, false)).isTrue();
            assertThat(tempFileDir.setWritable(false, false)).isTrue();
            byte[] data = new byte[2000000];
            rnd.nextBytes(data);
            try {
                // upload the file to the server directly
                assertThatThrownBy(() -> put(server, jobId, data, blobType))
                        .isInstanceOf(IOException.class)
                        .hasMessageEndingWith(" (Permission denied)");
            } finally {
                File storageDir = tempFileDir.getParentFile();
                // only the incoming directory should exist (no job directory!)
                assertThat(storageDir.list()).containsExactly("incoming");
            }
        } finally {
            // set writable again to make sure we can remove the directory
            if (tempFileDir != null) {
                //noinspection ResultOfMethodCallIgnored
                tempFileDir.setWritable(true, false);
            }
        }
    }
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsStoreNoJob() throws IOException {
testPutBufferFailsStore(null, TRANSIENT_BLOB);
}
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsStoreForJob() throws IOException {
testPutBufferFailsStore(new JobID(), TRANSIENT_BLOB);
}
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser")
@Test
void testPutBufferFailsStoreForJobHa() throws IOException {
testPutBufferFailsStore(new JobID(), PERMANENT_BLOB);
}
    /**
     * Uploads a byte array to a server which cannot move incoming files to the final blob store via
     * the {@link BlobServer}. File transfers should fail.
     *
     * <p>Only the final (job-specific) storage directory is made read-only, so staging in the
     * "incoming" directory succeeds but the final move fails; the staged file must be cleaned up.
     *
     * @param jobId job id (or {@code null} if job-unrelated)
     * @param blobType whether the BLOB should become permanent or transient
     */
    private void testPutBufferFailsStore(@Nullable final JobID jobId, BlobKey.BlobType blobType)
            throws IOException {
        assumeThat(OperatingSystem.isWindows()).as("setWritable doesn't work on Windows").isFalse();
        File jobStoreDir = null;
        try (BlobServer server = TestingBlobUtils.createServer(tempDir)) {
            server.start();
            // make sure the blob server cannot create any files in its storage dir
            jobStoreDir =
                    server.getStorageLocation(jobId, BlobKey.createKey(blobType)).getParentFile();
            assertThat(jobStoreDir.setExecutable(true, false)).isTrue();
            assertThat(jobStoreDir.setReadable(true, false)).isTrue();
            assertThat(jobStoreDir.setWritable(false, false)).isTrue();
            byte[] data = new byte[2000000];
            rnd.nextBytes(data);
            try {
                // upload the file to the server directly
                assertThatThrownBy(() -> put(server, jobId, data, blobType))
                        .isInstanceOf(AccessDeniedException.class);
            } finally {
                // there should be no remaining incoming files
                File incomingFileDir = new File(jobStoreDir.getParent(), "incoming");
                assertThat(incomingFileDir.list()).isEmpty();
                // there should be no files in the job directory
                assertThat(jobStoreDir.list()).isEmpty();
            }
        } finally {
            // set writable again to make sure we can remove the directory
            if (jobStoreDir != null) {
                //noinspection ResultOfMethodCallIgnored
                jobStoreDir.setWritable(true, false);
            }
        }
    }
@Test
void testConcurrentPutOperationsNoJob()
throws IOException, ExecutionException, InterruptedException {
testConcurrentPutOperations(null, TRANSIENT_BLOB);
}
@Test
void testConcurrentPutOperationsForJob()
throws IOException, ExecutionException, InterruptedException {
testConcurrentPutOperations(new JobID(), TRANSIENT_BLOB);
}
@Test
void testConcurrentPutOperationsForJobHa()
throws IOException, ExecutionException, InterruptedException {
testConcurrentPutOperations(new JobID(), PERMANENT_BLOB);
}
    /**
     * Verifies that a put which fails while persisting to the (HA) {@link BlobStore} also removes
     * the locally staged copy of the BLOB, leaving the job-specific storage directory empty.
     */
    @Test
    void testFailedBlobStorePutsDeletesLocalBlob() throws IOException {
        final BlobKey.BlobType blobType = PERMANENT_BLOB;
        final JobID jobId = JobID.generate();
        final byte[] data = new byte[] {1, 2, 3};
        final File storageDir = TempDirUtils.newFolder(tempDir);
        // blob store stub whose put always fails, simulating an unavailable HA store
        final TestingBlobStore blobStore =
                new TestingBlobStoreBuilder()
                        .setPutFunction(
                                (file, jobID, blobKey) -> {
                                    throw new IOException("Could not persist the file.");
                                })
                        .createTestingBlobStore();
        try (final BlobServer blobServer =
                new BlobServer(new Configuration(), storageDir, blobStore)) {
            assertThatThrownBy(() -> put(blobServer, jobId, data, blobType))
                    .isInstanceOf(IOException.class);
            // the failed put must not leave a local file behind
            final File jobSpecificStorageDirectory =
                    new File(BlobUtils.getStorageLocationPath(storageDir.getAbsolutePath(), jobId));
            assertThat(jobSpecificStorageDirectory).isEmptyDirectory();
        }
    }
    /**
     * [FLINK-6020] Tests that concurrent put operations will only upload the file once to the
     * {@link BlobStore} and that the files are not corrupt at any time.
     *
     * <p>All uploads block on a shared latch inside {@link BlockingInputStream}, so the put
     * operations are guaranteed to run concurrently.
     *
     * @param jobId job ID to use (or <tt>null</tt> if job-unrelated)
     * @param blobType whether the BLOB should become permanent or transient
     */
    private void testConcurrentPutOperations(
            @Nullable final JobID jobId, final BlobKey.BlobType blobType)
            throws IOException, InterruptedException, ExecutionException {
        final Configuration config = new Configuration();
        final int concurrentPutOperations = 2;
        final int dataSize = 1024;
        // records every key that is persisted to the (HA) blob store
        Collection<BlobKey> persistedBlobs = ConcurrentHashMap.newKeySet();
        TestingBlobStore blobStore =
                new TestingBlobStoreBuilder()
                        .setPutFunction(
                                (file, jobID, blobKey) -> {
                                    persistedBlobs.add(blobKey);
                                    return true;
                                })
                        .createTestingBlobStore();
        final CountDownLatch countDownLatch = new CountDownLatch(concurrentPutOperations);
        final byte[] data = new byte[dataSize];
        ArrayList<CompletableFuture<BlobKey>> allFutures = new ArrayList<>(concurrentPutOperations);
        ExecutorService executor = Executors.newFixedThreadPool(concurrentPutOperations);
        try (BlobServer server = TestingBlobUtils.createServer(tempDir, config, blobStore)) {
            server.start();
            for (int i = 0; i < concurrentPutOperations; i++) {
                CompletableFuture<BlobKey> putFuture =
                        CompletableFuture.supplyAsync(
                                () -> {
                                    try {
                                        BlockingInputStream inputStream =
                                                new BlockingInputStream(countDownLatch, data);
                                        BlobKey uploadedKey =
                                                put(server, jobId, inputStream, blobType);
                                        // check the uploaded file's contents (concurrently)
                                        verifyContents(server, jobId, uploadedKey, data);
                                        return uploadedKey;
                                    } catch (IOException e) {
                                        throw new CompletionException(
                                                new FlinkException("Could not upload blob.", e));
                                    }
                                },
                                executor);
                allFutures.add(putFuture);
            }
            FutureUtils.ConjunctFuture<Collection<BlobKey>> conjunctFuture =
                    FutureUtils.combineAll(allFutures);
            // wait until all operations have completed and check that no exception was thrown
            Collection<BlobKey> blobKeys = conjunctFuture.get();
            Iterator<BlobKey> blobKeyIterator = blobKeys.iterator();
            assertThat(blobKeyIterator).hasNext();
            BlobKey blobKey = blobKeyIterator.next();
            // make sure that all blob keys are the same
            while (blobKeyIterator.hasNext()) {
                verifyKeyDifferentHashEquals(blobKey, blobKeyIterator.next());
            }
            // check the uploaded file's contents
            verifyContents(server, jobId, blobKey, data);
            // check that we only uploaded the file once to the blob store
            if (blobType == PERMANENT_BLOB) {
                assertThat(persistedBlobs).hasSameElementsAs(blobKeys);
            } else {
                // transient BLOBs are never persisted to the blob store; beyond that, we can only
                // verify that the put operations work and do not corrupt files
                assertThat(persistedBlobs).isEmpty();
            }
        } finally {
            executor.shutdownNow();
        }
    }
// --------------------------------------------------------------------------------------------
    /**
     * Helper to upload an {@link InputStream}, choosing between {@link BlobServer#putPermanent}
     * and {@code putTransient} depending on the given BLOB type.
     *
     * <p>Permanent BLOBs can only be uploaded directly at a {@link BlobServer}; for any other
     * service an {@link UnsupportedOperationException} is thrown.
     *
     * @param service BLOB service to upload to
     * @param jobId job id (or {@code null} if job-unrelated; must not be {@code null} for
     *     permanent BLOBs)
     * @param data stream with the data to upload
     * @param blobType whether the BLOB should become permanent or transient
     * @return blob key for the uploaded data
     * @throws IOException if the upload fails
     */
    static BlobKey put(
            BlobService service, @Nullable JobID jobId, InputStream data, BlobKey.BlobType blobType)
            throws IOException {
        if (blobType == PERMANENT_BLOB) {
            if (service instanceof BlobServer) {
                return ((BlobServer) service).putPermanent(jobId, data);
            } else {
                throw new UnsupportedOperationException(
                        "uploading streams is only possible at the BlobServer");
            }
        } else if (jobId == null) {
            return service.getTransientBlobService().putTransient(data);
        } else {
            return service.getTransientBlobService().putTransient(jobId, data);
        }
    }
    /**
     * Helper to upload a byte array, choosing between {@link BlobServer#putPermanent} and {@code
     * putTransient} depending on the given BLOB type.
     *
     * <p>If a permanent BLOB is requested on a service that is not a {@link BlobServer}, the
     * upload is implemented by writing the bytes to a temporary JAR file and uploading that file
     * via {@link BlobClient#uploadFiles}.
     *
     * @param service BLOB service to upload to
     * @param jobId job id (or {@code null} if job-unrelated)
     * @param data data to upload
     * @param blobType whether the BLOB should become permanent or transient
     * @return blob key for the uploaded data
     * @throws IOException if the upload fails
     */
    static BlobKey put(
            BlobService service, @Nullable JobID jobId, byte[] data, BlobKey.BlobType blobType)
            throws IOException {
        if (blobType == PERMANENT_BLOB) {
            if (service instanceof BlobServer) {
                return ((BlobServer) service).putPermanent(jobId, data);
            } else {
                // implement via JAR file upload instead:
                File tmpFile = Files.createTempFile("blob", ".jar").toFile();
                try {
                    FileUtils.writeByteArrayToFile(tmpFile, data);
                    InetSocketAddress serverAddress =
                            new InetSocketAddress("localhost", service.getPort());
                    // uploading HA BLOBs works on BlobServer only (and, for now, via the
                    // BlobClient)
                    Configuration clientConfig = new Configuration();
                    List<Path> jars =
                            Collections.singletonList(new Path(tmpFile.getAbsolutePath()));
                    List<PermanentBlobKey> keys =
                            BlobClient.uploadFiles(serverAddress, clientConfig, jobId, jars);
                    assertThat(keys).hasSize(1);
                    return keys.get(0);
                } finally {
                    //noinspection ResultOfMethodCallIgnored
                    tmpFile.delete();
                }
            }
        } else if (jobId == null) {
            return service.getTransientBlobService().putTransient(data);
        } else {
            return service.getTransientBlobService().putTransient(jobId, data);
        }
    }
    /**
     * GET the data stored under the given key and check that it is equal to <tt>data</tt>.
     *
     * @param blobService BlobServer to use
     * @param jobId job ID or <tt>null</tt> if job-unrelated
     * @param key blob key
     * @param data expected data
     * @throws IOException if accessing the BLOB fails
     */
    static void verifyContents(
            BlobService blobService, @Nullable JobID jobId, BlobKey key, byte[] data)
            throws IOException {
        File file = get(blobService, jobId, key);
        validateGetAndClose(Files.newInputStream(file.toPath()), data);
    }
    /**
     * GET the data stored under the given key and check that it is equal to the contents of
     * <tt>data</tt>.
     *
     * @param blobService BlobServer to use
     * @param jobId job ID or <tt>null</tt> if job-unrelated
     * @param key blob key
     * @param data stream with the expected data
     * @throws IOException if accessing the BLOB fails
     */
    static void verifyContents(
            BlobService blobService, @Nullable JobID jobId, BlobKey key, InputStream data)
            throws IOException {
        File file = get(blobService, jobId, key);
        validateGetAndClose(Files.newInputStream(file.toPath()), data);
    }
// --------------------------------------------------------------------------------------------
static final class BlockingInputStream extends InputStream {
private final CountDownLatch countDownLatch;
private final byte[] data;
private int index = 0;
BlockingInputStream(CountDownLatch countDownLatch, byte[] data) {
this.countDownLatch = Preconditions.checkNotNull(countDownLatch);
this.data = Preconditions.checkNotNull(data);
}
@Override
public int read() throws IOException {
countDownLatch.countDown();
try {
countDownLatch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IOException("Blocking operation was interrupted.", e);
}
if (index >= data.length) {
return -1;
} else {
return data[index++];
}
}
}
// --------------------------------------------------------------------------------------------
static final class ChunkedInputStream extends InputStream {
private final byte[][] data;
private int x = 0, y = 0;
ChunkedInputStream(byte[] data, int numChunks) {
this.data = new byte[numChunks][];
int bytesPerChunk = data.length / numChunks;
int bytesTaken = 0;
for (int i = 0; i < numChunks - 1; i++, bytesTaken += bytesPerChunk) {
this.data[i] = new byte[bytesPerChunk];
System.arraycopy(data, bytesTaken, this.data[i], 0, bytesPerChunk);
}
this.data[numChunks - 1] = new byte[data.length - bytesTaken];
System.arraycopy(
data, bytesTaken, this.data[numChunks - 1], 0, this.data[numChunks - 1].length);
}
@Override
public int read() {
if (x < data.length) {
byte[] curr = data[x];
if (y < curr.length) {
byte next = curr[y];
y++;
return next;
} else {
y = 0;
x++;
return read();
}
} else {
return -1;
}
}
@Override
public int read(byte[] b, int off, int len) {
if (len == 0) {
return 0;
}
if (x < data.length) {
byte[] curr = data[x];
if (y < curr.length) {
int toCopy = Math.min(len, curr.length - y);
System.arraycopy(curr, y, b, off, toCopy);
y += toCopy;
return toCopy;
} else {
y = 0;
x++;
return read(b, off, len);
}
} else {
return -1;
}
}
}
}
|
apache/jackrabbit-oak | 36,522 | oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/pipelined/PipelinedStrategy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.index.indexer.document.flatfile.pipelined;
import com.mongodb.MongoClientSettings;
import com.mongodb.ConnectionString;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.apache.jackrabbit.oak.commons.Compression;
import org.apache.jackrabbit.oak.commons.IOUtils;
import org.apache.jackrabbit.oak.commons.concurrent.ExecutorCloser;
import org.apache.jackrabbit.oak.commons.conditions.Validate;
import org.apache.jackrabbit.oak.commons.time.Stopwatch;
import org.apache.jackrabbit.oak.index.ThreadMonitor;
import org.apache.jackrabbit.oak.index.indexer.document.flatfile.NodeStateEntryWriter;
import org.apache.jackrabbit.oak.index.indexer.document.indexstore.IndexStoreSortStrategyBase;
import org.apache.jackrabbit.oak.plugins.document.Collection;
import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore;
import org.apache.jackrabbit.oak.plugins.document.RevisionVector;
import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore;
import org.apache.jackrabbit.oak.plugins.index.ConfigHelper;
import org.apache.jackrabbit.oak.plugins.index.FormattingUtils;
import org.apache.jackrabbit.oak.plugins.index.IndexingReporter;
import org.apache.jackrabbit.oak.plugins.index.MetricsFormatter;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.filter.PathFilter;
import org.apache.jackrabbit.oak.stats.StatisticsProvider;
import org.bson.RawBsonDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCountBin;
import static org.apache.jackrabbit.oak.plugins.index.IndexUtils.INDEXING_PHASE_LOGGER;
/**
* Downloads the contents of the MongoDB repository dividing the tasks in a pipeline with the following stages:
* <ul>
* <li>Download - Downloads from Mongo all the documents in the node store.
* <li>Transform - Converts Mongo documents to node state entries.
* <li>Sort and save - Sorts the batch of node state entries and saves them to disk
* <li>Merge sorted files - Merge the intermediate sorted files into a single file (the final FlatFileStore).
* </ul>
* <p>
* <h2>Memory management</h2>
* <p>
* For efficiency, the intermediate sorted files should be as large as possible given the memory constraints.
* This strategy accumulates the entries that will be stored in each of these files in memory until reaching a maximum
* configurable size, at which point it sorts the data and writes it to a file. The data is accumulated in instances of
* {@link NodeStateEntryBatch}. This class contains two data structures:
* <ul>
* <li>A {@link java.nio.ByteBuffer} for the binary representation of the entry, that is, the byte array that will be written to the file.
* This buffer contains length-prefixed byte arrays, that is, each entry is {@code <size><data>}, where size is a 4 byte int.
* <li>An array of {@link SortKey} instances, which contain the paths of each entry and are used to sort the entries. Each element
* in this array also contains the position in the ByteBuffer of the serialized representation of the entry.
* </ul>
* This representation has several advantages:
* <ul>
* <li>It is compact, as a String object in the heap requires more memory than a length-prefixed byte array in the ByteBuffer.
* <li>Predictable memory usage - the memory used by the {@link java.nio.ByteBuffer} is fixed and allocated at startup
* (more on this later). The memory used by the array of {@link SortKey} is not bounded, but these objects are small,
* as they contain little more than the path of the entry, and we can easily put limits on the maximum number of entries
* kept in a buffer.
* </ul>
* <p>
* The instances of {@link NodeStateEntryBatch} are created at launch time. We create {@code #transformThreads+1} buffers.
* This way, except for some rare situations, each transform thread will have its own buffer where to write the entries
* and there will be an extra buffer to be used by the Save-and-Sort thread, so that all the transform and sort threads
* can operate concurrently.
* <p>
* These buffers are reused. Once the Save-and-Sort thread finishes processing a buffer, it clears it and sends it back
* to the transform threads. For this, we use two queues, one with empty buffers, from where the transform threads take
* their buffers when they need one, and another with full buffers, which are read by the Save-and-Sort thread.
* <p>
* Reusing the buffers reduces significantly the pressure on the garbage collector and ensures that we do not run out
* of memory, as the largest blocks of memory are pre-allocated and reused.
* <p>
* The total amount of memory used by the buffers is a configurable parameter (env variable {@link #OAK_INDEXER_PIPELINED_WORKING_MEMORY_MB}).
 * This memory is divided in {@code numberOfBuffers + 1} regions, each of
 * {@code regionSize = PIPELINED_WORKING_MEMORY_MB/(numberOfBuffers + 1)} size.
* Each ByteBuffer is of {@code regionSize} big. The extra region is to account for the memory taken by the {@link SortKey}
* entries. There is also a maximum limit on the number of entries, which is calculated based on regionSize
* (we assume each {@link SortKey} entry requires 256 bytes).
* <p>
* The transform threads will stop filling a buffer and enqueue it for sorting and saving once either the byte buffer is
* full or the number of entries in the buffer reaches the limit.
* <p>
*
 * <h2>Retries on broken MongoDB connections</h2>
*/
public class PipelinedStrategy extends IndexStoreSortStrategyBase {
    // System property: maximum size (MB) of a single batch of MongoDB documents passed between
    // the download and transform stages.
    public static final String OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_SIZE_MB = "oak.indexer.pipelined.mongoDocBatchMaxSizeMB";
    public static final int DEFAULT_OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_SIZE_MB = 4;
    // System property: maximum number of documents in a single batch.
    public static final String OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_NUMBER_OF_DOCUMENTS = "oak.indexer.pipelined.mongoDocBatchMaxNumberOfDocuments";
    public static final int DEFAULT_OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_NUMBER_OF_DOCUMENTS = 10000;
    // System property: memory (MB) reserved for the queue of downloaded Mongo documents.
    public static final String OAK_INDEXER_PIPELINED_MONGO_DOC_QUEUE_RESERVED_MEMORY_MB = "oak.indexer.pipelined.mongoDocQueueReservedMemoryMB";
    public static final int DEFAULT_OAK_INDEXER_PIPELINED_MONGO_DOC_QUEUE_RESERVED_MEMORY_MB = 128;
    // System property: number of threads converting Mongo documents into node state entries
    // (see class javadoc, "Memory management").
    public static final String OAK_INDEXER_PIPELINED_TRANSFORM_THREADS = "oak.indexer.pipelined.transformThreads";
    public static final int DEFAULT_OAK_INDEXER_PIPELINED_TRANSFORM_THREADS = 2;
    // System property: total working memory (MB) for the sort buffers.
    public static final String OAK_INDEXER_PIPELINED_WORKING_MEMORY_MB = "oak.indexer.pipelined.workingMemoryMB";
    // 0 means autodetect
    public static final int DEFAULT_OAK_INDEXER_PIPELINED_WORKING_MEMORY_MB = 0;
    // System property: percentage of the working memory assigned to the sort buffers.
    // Between 1 and 100
    public static final String OAK_INDEXER_PIPELINED_SORT_BUFFER_MEMORY_PERCENTAGE = "oak.indexer.pipelined.sortBufferMemoryPercentage";
    public static final int DEFAULT_OAK_INDEXER_PIPELINED_SORT_BUFFER_MEMORY_PERCENTAGE = 25;
    // System properties: optional path/suffix based filter applied to NodeDocuments.
    public static final String OAK_INDEXER_PIPELINED_NODE_DOCUMENT_FILTER_FILTERED_PATH = "oak.indexer.pipelined.nodeDocument.filter.filteredPath";
    public static final String OAK_INDEXER_PIPELINED_NODE_DOCUMENT_FILTER_SUFFIXES_TO_SKIP = "oak.indexer.pipelined.nodeDocument.filter.suffixesToSkip";
    private final String filteredPath = ConfigHelper.getSystemPropertyAsString(OAK_INDEXER_PIPELINED_NODE_DOCUMENT_FILTER_FILTERED_PATH, "");
    private final List<String> suffixesToSkip = ConfigHelper.getSystemPropertyAsStringList(OAK_INDEXER_PIPELINED_NODE_DOCUMENT_FILTER_SUFFIXES_TO_SKIP, "", ';');
    // Sentinel values used as end-of-input markers on the inter-stage queues.
    static final NodeStateEntryBatch SENTINEL_NSE_BUFFER = new NodeStateEntryBatch(ByteBuffer.allocate(0), 0);
    static final Path SENTINEL_SORTED_FILES_QUEUE = Paths.get("SENTINEL");
    // Encoding and separators of the flat file store format.
    static final Charset FLATFILESTORE_CHARSET = StandardCharsets.UTF_8;
    static final char FLATFILESTORE_LINE_SEPARATOR = '\n';
    static final byte FLATFILESTORE_DELIMITER = '|';
    private static final Logger LOG = LoggerFactory.getLogger(PipelinedStrategy.class);
    // A MongoDB document is at most 16MB, so the buffer that holds node state entries must be at least that big
    private static final int MIN_MONGO_DOC_QUEUE_RESERVED_MEMORY_MB = 16;
    // Bounds applied when the working memory is autodetected (workingMemoryMB == 0).
    private static final int MIN_AUTODETECT_WORKING_MEMORY_MB = 128;
    private static final int MIN_ENTRY_BATCH_BUFFER_SIZE_MB = 32;
    private static final int MAX_AUTODETECT_WORKING_MEMORY_MB = 4000;
private static <T> void printStatistics(ArrayBlockingQueue<T[]> mongoDocQueue,
ArrayBlockingQueue<NodeStateEntryBatch> emptyBuffersQueue,
ArrayBlockingQueue<NodeStateEntryBatch> nonEmptyBuffersQueue,
ArrayBlockingQueue<Path> sortedFilesQueue,
TransformStageStatistics transformStageStatistics,
boolean printHistogramsAtInfo) {
String queueSizeStats = MetricsFormatter.newBuilder()
.add("mongoDocQueue", mongoDocQueue.size())
.add("emptyBuffersQueue", emptyBuffersQueue.size())
.add("nonEmptyBuffersQueue", nonEmptyBuffersQueue.size())
.add("sortedFilesQueue", sortedFilesQueue.size())
.build();
LOG.info("Queue sizes: {}", queueSizeStats);
LOG.info("Transform stats: {}", transformStageStatistics.formatStats());
prettyPrintTransformStatisticsHistograms(transformStageStatistics, printHistogramsAtInfo);
}
private static void prettyPrintTransformStatisticsHistograms(TransformStageStatistics transformStageStatistics, boolean printHistogramAtInfo) {
if (printHistogramAtInfo) {
LOG.info("Top hidden paths rejected: {}", transformStageStatistics.getHiddenPathsRejectedHistogram().prettyPrint());
LOG.info("Top paths filtered: {}", transformStageStatistics.getFilteredPathsRejectedHistogram().prettyPrint());
} else {
LOG.debug("Top hidden paths rejected: {}", transformStageStatistics.getHiddenPathsRejectedHistogram().prettyPrint());
LOG.debug("Top paths filtered: {}", transformStageStatistics.getFilteredPathsRejectedHistogram().prettyPrint());
}
}
    // Read-only document store the download and transform stages read from.
    private final MongoDocumentStore docStore;
    private final ConnectionString mongoClientURI;
    private final DocumentNodeStore documentNodeStore;
    // Revision at which node states are read (the indexing checkpoint).
    private final RevisionVector rootRevision;
    private final BlobStore blobStore;
    // Comparator that orders paths, giving priority to the preferred path elements.
    private final PathElementComparator pathComparator;
    // If non-empty, used to build a Mongo query that restricts the download.
    private final List<PathFilter> pathFilters;
    private final StatisticsProvider statisticsProvider;
    private final IndexingReporter indexingReporter;
    // Pipeline sizing, derived from system properties in the constructor.
    private final int numberOfTransformThreads;
    private final int mongoDocQueueSize;
    private final int mongoDocBatchMaxSizeMB;
    private final int mongoDocBatchMaxNumberOfDocuments;
    private final int nseBuffersCount;
    private final int nseBuffersSizeBytes;
    // Running total of entries produced by the transform tasks; reported by getEntryCount().
    private long nodeStateEntriesExtracted;
/**
* @param mongoClientURI URI of the Mongo cluster.
* @param pathPredicate Used by the transform stage to test if a node should be kept or discarded.
* @param pathFilters If non-empty, the download stage will use these filters to create a query that downloads
* only the matching MongoDB documents.
* @param statisticsProvider Used to collect statistics about the indexing process.
* @param indexingReporter Used to collect diagnostics, metrics and statistics and report them at the end of the indexing process.
*/
public PipelinedStrategy(ConnectionString mongoClientURI,
MongoDocumentStore documentStore,
DocumentNodeStore documentNodeStore,
RevisionVector rootRevision,
Set<String> preferredPathElements,
BlobStore blobStore,
File storeDir,
Compression algorithm,
Predicate<String> pathPredicate,
List<PathFilter> pathFilters,
String checkpoint,
StatisticsProvider statisticsProvider,
IndexingReporter indexingReporter) {
super(storeDir, algorithm, pathPredicate, preferredPathElements, checkpoint);
this.mongoClientURI = mongoClientURI;
this.docStore = documentStore;
this.documentNodeStore = documentNodeStore;
this.rootRevision = rootRevision;
this.blobStore = blobStore;
this.pathComparator = new PathElementComparator(preferredPathElements);
this.pathFilters = pathFilters;
this.statisticsProvider = statisticsProvider;
this.indexingReporter = indexingReporter;
Validate.checkState(documentStore.isReadOnly(), "Traverser can only be used with readOnly store");
int mongoDocQueueReservedMemoryMB = ConfigHelper.getSystemPropertyAsInt(OAK_INDEXER_PIPELINED_MONGO_DOC_QUEUE_RESERVED_MEMORY_MB, DEFAULT_OAK_INDEXER_PIPELINED_MONGO_DOC_QUEUE_RESERVED_MEMORY_MB);
Validate.checkArgument(mongoDocQueueReservedMemoryMB >= MIN_MONGO_DOC_QUEUE_RESERVED_MEMORY_MB,
"Invalid value for property " + OAK_INDEXER_PIPELINED_MONGO_DOC_QUEUE_RESERVED_MEMORY_MB + ": " + mongoDocQueueReservedMemoryMB + ". Must be >= " + MIN_MONGO_DOC_QUEUE_RESERVED_MEMORY_MB);
this.indexingReporter.addConfig(OAK_INDEXER_PIPELINED_MONGO_DOC_QUEUE_RESERVED_MEMORY_MB, String.valueOf(mongoDocQueueReservedMemoryMB));
this.mongoDocBatchMaxSizeMB = ConfigHelper.getSystemPropertyAsInt(OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_SIZE_MB, DEFAULT_OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_SIZE_MB);
Validate.checkArgument(mongoDocBatchMaxSizeMB > 0,
"Invalid value for property " + OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_SIZE_MB + ": " + mongoDocBatchMaxSizeMB + ". Must be > 0");
this.indexingReporter.addConfig(OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_SIZE_MB, String.valueOf(mongoDocBatchMaxSizeMB));
this.mongoDocBatchMaxNumberOfDocuments = ConfigHelper.getSystemPropertyAsInt(OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_NUMBER_OF_DOCUMENTS, DEFAULT_OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_NUMBER_OF_DOCUMENTS);
Validate.checkArgument(mongoDocBatchMaxNumberOfDocuments > 0,
"Invalid value for property " + OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_NUMBER_OF_DOCUMENTS + ": " + mongoDocBatchMaxNumberOfDocuments + ". Must be > 0");
this.indexingReporter.addConfig(OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_NUMBER_OF_DOCUMENTS, String.valueOf(mongoDocBatchMaxNumberOfDocuments));
this.numberOfTransformThreads = ConfigHelper.getSystemPropertyAsInt(OAK_INDEXER_PIPELINED_TRANSFORM_THREADS, DEFAULT_OAK_INDEXER_PIPELINED_TRANSFORM_THREADS);
Validate.checkArgument(numberOfTransformThreads > 0,
"Invalid value for property " + OAK_INDEXER_PIPELINED_TRANSFORM_THREADS + ": " + numberOfTransformThreads + ". Must be > 0");
this.indexingReporter.addConfig(OAK_INDEXER_PIPELINED_TRANSFORM_THREADS, String.valueOf(numberOfTransformThreads));
int sortBufferMemoryPercentage = ConfigHelper.getSystemPropertyAsInt(OAK_INDEXER_PIPELINED_SORT_BUFFER_MEMORY_PERCENTAGE, DEFAULT_OAK_INDEXER_PIPELINED_SORT_BUFFER_MEMORY_PERCENTAGE);
Validate.checkArgument(sortBufferMemoryPercentage > 0 && sortBufferMemoryPercentage <= 100,
"Invalid value for property " + OAK_INDEXER_PIPELINED_SORT_BUFFER_MEMORY_PERCENTAGE + ": " + numberOfTransformThreads + ". Must be between 1 and 100");
this.indexingReporter.addConfig(OAK_INDEXER_PIPELINED_SORT_BUFFER_MEMORY_PERCENTAGE, String.valueOf(sortBufferMemoryPercentage));
// mongo-dump <-> transform threads
Validate.checkArgument(mongoDocQueueReservedMemoryMB >= 8 * mongoDocBatchMaxSizeMB,
"Invalid values for properties " + OAK_INDEXER_PIPELINED_MONGO_DOC_QUEUE_RESERVED_MEMORY_MB + " and " + OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_SIZE_MB +
": " + OAK_INDEXER_PIPELINED_MONGO_DOC_QUEUE_RESERVED_MEMORY_MB + " must be at least 8x " + OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_SIZE_MB +
", but are " + mongoDocQueueReservedMemoryMB + " and " + mongoDocBatchMaxSizeMB + ", respectively"
);
this.mongoDocQueueSize = mongoDocQueueReservedMemoryMB / mongoDocBatchMaxSizeMB;
// Derived values for transform <-> sort-save
int nseWorkingMemoryMB = readNSEBuffersReservedMemory();
this.nseBuffersCount = 1 + numberOfTransformThreads;
long nseWorkingMemoryBytes = (long) nseWorkingMemoryMB * FileUtils.ONE_MB;
// The working memory is divided in the following regions:
// - #transforThreads NSE Binary buffers
// - x1 Memory reserved for the array created by the sort-batch thread with the keys of the entries
// in the batch that is being sorted
long memoryReservedForSortKeysArray = estimateMaxSizeOfSortKeyArray(nseWorkingMemoryBytes, nseBuffersCount, sortBufferMemoryPercentage);
long memoryReservedForBuffers = nseWorkingMemoryBytes - memoryReservedForSortKeysArray;
// A ByteBuffer can be at most Integer.MAX_VALUE bytes long
this.nseBuffersSizeBytes = limitToIntegerRange(memoryReservedForBuffers / nseBuffersCount);
if (nseBuffersSizeBytes < MIN_ENTRY_BATCH_BUFFER_SIZE_MB * FileUtils.ONE_MB) {
throw new IllegalArgumentException("Entry batch buffer size too small: " + nseBuffersSizeBytes +
" bytes. Must be at least " + MIN_ENTRY_BATCH_BUFFER_SIZE_MB + " MB. " +
"To increase the size of the buffers, either increase the size of the working memory region " +
"(system property " + OAK_INDEXER_PIPELINED_WORKING_MEMORY_MB + ") or decrease the number of transform " +
"threads (" + OAK_INDEXER_PIPELINED_TRANSFORM_THREADS + ")");
}
LOG.info("MongoDocumentQueue: [ reservedMemory: {} MB, batchMaxSize: {} MB, queueSize: {} (reservedMemory/batchMaxSize) ]",
mongoDocQueueReservedMemoryMB,
mongoDocBatchMaxSizeMB,
mongoDocQueueSize);
LOG.info("NodeStateEntryBuffers: [ workingMemory: {} MB, numberOfBuffers: {}, bufferSize: {}, sortBufferReservedMemory: {} ]",
nseWorkingMemoryMB,
nseBuffersCount,
IOUtils.humanReadableByteCountBin(nseBuffersSizeBytes),
IOUtils.humanReadableByteCountBin(memoryReservedForSortKeysArray)
);
}
static long estimateMaxSizeOfSortKeyArray(long nseWorkingMemoryBytes, long nseBuffersCount, int sortBufferMemoryPercentage) {
// We reserve a percentage of the size of a buffer for the sort keys array. That is, we are assuming that for every line
// in the sort buffer, the memory needed to store the SortKey of the path section of the line will not be more
// than sortBufferMemoryPercentage of the total size of the line in average
// Estimate memory needed by the sort keys array. We assume each entry requires 256 bytes.
long approxNseBufferSize = limitToIntegerRange(nseWorkingMemoryBytes / nseBuffersCount);
return approxNseBufferSize * sortBufferMemoryPercentage / 100;
}
private int readNSEBuffersReservedMemory() {
int workingMemoryMB = ConfigHelper.getSystemPropertyAsInt(OAK_INDEXER_PIPELINED_WORKING_MEMORY_MB, DEFAULT_OAK_INDEXER_PIPELINED_WORKING_MEMORY_MB);
Validate.checkArgument(workingMemoryMB >= 0,
"Invalid value for property " + OAK_INDEXER_PIPELINED_WORKING_MEMORY_MB + ": " + workingMemoryMB + ". Must be >= 0");
indexingReporter.addConfig(OAK_INDEXER_PIPELINED_WORKING_MEMORY_MB, workingMemoryMB);
if (workingMemoryMB == 0) {
return autodetectWorkingMemoryMB();
} else {
return workingMemoryMB;
}
}
private int autodetectWorkingMemoryMB() {
int maxHeapSizeMB = (int) (Runtime.getRuntime().maxMemory() / FileUtils.ONE_MB);
int workingMemoryMB = maxHeapSizeMB - 2048;
LOG.info("Auto detecting working memory. Maximum heap size: {} MB, selected working memory: {} MB", maxHeapSizeMB, workingMemoryMB);
if (workingMemoryMB > MAX_AUTODETECT_WORKING_MEMORY_MB) {
LOG.warn("Auto-detected value for working memory too high, setting to the maximum allowed for auto-detection: {} MB", MAX_AUTODETECT_WORKING_MEMORY_MB);
return MAX_AUTODETECT_WORKING_MEMORY_MB;
}
if (workingMemoryMB < MIN_AUTODETECT_WORKING_MEMORY_MB) {
LOG.warn("Auto-detected value for working memory too low, setting to the minimum allowed for auto-detection: {} MB", MIN_AUTODETECT_WORKING_MEMORY_MB);
return MIN_AUTODETECT_WORKING_MEMORY_MB;
}
return workingMemoryMB;
}
private static int limitToIntegerRange(long bufferSizeBytes) {
if (bufferSizeBytes > Integer.MAX_VALUE) {
// Probably not necessary to subtract 16, just a safeguard to avoid boundary conditions.
int truncatedBufferSize = Integer.MAX_VALUE - 16;
LOG.warn("Computed buffer size too big: {}, exceeds Integer.MAX_VALUE. Truncating to: {}", bufferSizeBytes, truncatedBufferSize);
return truncatedBufferSize;
} else {
return (int) bufferSizeBytes;
}
}
@Override
public File createSortedStoreFile() throws IOException {
int numberOfThreads = 1 + numberOfTransformThreads + 1 + 1; // dump, transform, sort threads, sorted files merge
ThreadMonitor threadMonitor = ThreadMonitor.newInstance();
var threadFactory = new ThreadMonitor.AutoRegisteringThreadFactory(threadMonitor, BasicThreadFactory.builder().daemon().build());
ExecutorService threadPool = Executors.newFixedThreadPool(numberOfThreads, threadFactory);
MongoDocumentFilter documentFilter = new MongoDocumentFilter(filteredPath, suffixesToSkip);
NodeDocumentCodec nodeDocumentCodec = new NodeDocumentCodec(docStore, Collection.NODES, documentFilter, MongoClientSettings.getDefaultCodecRegistry());
// This executor can wait for several tasks at the same time. We use this below to wait at the same time for
// all the tasks, so that if one of them fails, we can abort the whole pipeline. Otherwise, if we wait on
// Future instances, we can only wait on one of them, so that if any of the others fail, we have no easy way
// to detect this failure.
@SuppressWarnings("rawtypes")
ExecutorCompletionService ecs = new ExecutorCompletionService<>(threadPool);
try {
// download -> transform thread.
ArrayBlockingQueue<RawBsonDocument[]> mongoDocQueue = new ArrayBlockingQueue<>(mongoDocQueueSize);
// transform <-> sort and save threads
// Queue with empty buffers, used by the transform task
ArrayBlockingQueue<NodeStateEntryBatch> emptyBatchesQueue = new ArrayBlockingQueue<>(nseBuffersCount);
// Queue with buffers filled by the transform task, used by the sort and save task. +1 for the SENTINEL
ArrayBlockingQueue<NodeStateEntryBatch> nonEmptyBatchesQueue = new ArrayBlockingQueue<>(nseBuffersCount + 1);
// Queue between sort-and-save thread and the merge-sorted-files thread
ArrayBlockingQueue<Path> sortedFilesQueue = new ArrayBlockingQueue<>(64);
TransformStageStatistics transformStageStatistics = new TransformStageStatistics();
// Create empty buffers
for (int i = 0; i < nseBuffersCount; i++) {
// No limits on the number of entries, only on their total size. This might be revised later.
emptyBatchesQueue.add(NodeStateEntryBatch.createNodeStateEntryBatch(nseBuffersSizeBytes, Integer.MAX_VALUE));
}
INDEXING_PHASE_LOGGER.info("[TASK:PIPELINED-DUMP:START] Starting to build FFS");
Stopwatch start = Stopwatch.createStarted();
@SuppressWarnings("unchecked")
Future<PipelinedMongoDownloadTask.Result> downloadFuture = ecs.submit(new PipelinedMongoDownloadTask(
mongoClientURI,
docStore,
(int) (mongoDocBatchMaxSizeMB * FileUtils.ONE_MB),
mongoDocBatchMaxNumberOfDocuments,
mongoDocQueue,
pathFilters,
statisticsProvider,
indexingReporter,
threadFactory
));
ArrayList<Future<PipelinedTransformTask.Result>> transformFutures = new ArrayList<>(numberOfTransformThreads);
for (int i = 0; i < numberOfTransformThreads; i++) {
NodeStateEntryWriter entryWriter = new NodeStateEntryWriter(blobStore);
@SuppressWarnings("unchecked")
Future<PipelinedTransformTask.Result> future = ecs.submit(new PipelinedTransformTask(
docStore,
documentNodeStore,
nodeDocumentCodec,
rootRevision,
this.getPathPredicate(),
entryWriter,
mongoDocQueue,
emptyBatchesQueue,
nonEmptyBatchesQueue,
transformStageStatistics
));
transformFutures.add(future);
}
@SuppressWarnings("unchecked")
Future<PipelinedSortBatchTask.Result> sortBatchFuture = ecs.submit(new PipelinedSortBatchTask(
this.getStoreDir().toPath(),
pathComparator,
this.getAlgorithm(),
emptyBatchesQueue,
nonEmptyBatchesQueue,
sortedFilesQueue,
statisticsProvider,
indexingReporter
));
PipelinedMergeSortTask mergeSortTask = new PipelinedMergeSortTask(
this.getStoreDir().toPath(),
pathComparator,
this.getAlgorithm(),
sortedFilesQueue,
statisticsProvider,
indexingReporter);
@SuppressWarnings("unchecked")
Future<PipelinedMergeSortTask.Result> mergeSortFuture = ecs.submit(mergeSortTask);
Path flatFileStore = null;
try {
LOG.info("Waiting for tasks to complete");
int tasksFinished = 0;
int transformTasksFinished = 0;
boolean monitorQueues = true;
threadMonitor.start();
while (tasksFinished < numberOfThreads) {
// Wait with a timeout to print statistics periodically
Future<?> completedTask = ecs.poll(60, TimeUnit.SECONDS);
if (completedTask == null) {
// Timeout waiting for a task to complete
if (monitorQueues) {
try {
LOG.info(threadMonitor.printStatistics());
printStatistics(mongoDocQueue, emptyBatchesQueue, nonEmptyBatchesQueue, sortedFilesQueue, transformStageStatistics, false);
} catch (Exception e) {
LOG.warn("Error while logging queue sizes", e);
}
LOG.info("Documents filtered: docsFiltered: {}, longPathsFiltered: {}, filteredRenditionsTotal (top 10): {}",
documentFilter.getSkippedFields(), documentFilter.getLongPathSkipped(), documentFilter.formatTopK(10));
}
} else {
try {
Object result = completedTask.get();
if (result instanceof PipelinedMongoDownloadTask.Result) {
PipelinedMongoDownloadTask.Result downloadResult = (PipelinedMongoDownloadTask.Result) result;
LOG.info("Download finished. Documents downloaded: {}", downloadResult.getDocumentsDownloaded());
mergeSortTask.stopEagerMerging();
downloadFuture = null;
} else if (result instanceof PipelinedTransformTask.Result) {
PipelinedTransformTask.Result transformResult = (PipelinedTransformTask.Result) result;
transformTasksFinished++;
nodeStateEntriesExtracted += transformResult.getEntryCount();
LOG.info("Transform task {} finished. Entries processed: {}",
transformResult.getThreadId(), transformResult.getEntryCount());
if (transformTasksFinished == numberOfTransformThreads) {
LOG.info("All transform tasks finished. Total entries processed: {}", nodeStateEntriesExtracted);
// No need to keep monitoring the queues, the download and transform threads are done.
monitorQueues = false;
// Terminate the sort thread.
nonEmptyBatchesQueue.put(SENTINEL_NSE_BUFFER);
transformStageStatistics.publishStatistics(statisticsProvider, indexingReporter);
transformFutures.clear();
}
} else if (result instanceof PipelinedSortBatchTask.Result) {
PipelinedSortBatchTask.Result sortTaskResult = (PipelinedSortBatchTask.Result) result;
LOG.info("Sort batch task finished. Entries processed: {}", sortTaskResult.getTotalEntries());
sortedFilesQueue.put(SENTINEL_SORTED_FILES_QUEUE);
// The buffers between transform and merge sort tasks are no longer needed, so remove them
// from the queues so they can be garbage collected.
// These buffers can be very large, so this is important to avoid running out of memory in
// the merge-sort phase
if (!nonEmptyBatchesQueue.isEmpty()) {
LOG.warn("emptyBatchesQueue is not empty. Size: {}", emptyBatchesQueue.size());
}
emptyBatchesQueue.clear();
printStatistics(mongoDocQueue, emptyBatchesQueue, nonEmptyBatchesQueue, sortedFilesQueue, transformStageStatistics, true);
sortBatchFuture = null;
} else if (result instanceof PipelinedMergeSortTask.Result) {
PipelinedMergeSortTask.Result mergeSortedFilesTask = (PipelinedMergeSortTask.Result) result;
Path ffs = mergeSortedFilesTask.getFlatFileStoreFile();
LOG.info("Merge-sort sort task finished. FFS: {}, Size: {}", ffs, humanReadableByteCountBin(Files.size(ffs)));
flatFileStore = mergeSortedFilesTask.getFlatFileStoreFile();
mergeSortFuture = null;
} else {
throw new RuntimeException("Unknown result type: " + result);
}
tasksFinished++;
} catch (ExecutionException ex) {
throw new RuntimeException(ex.getCause());
} catch (Throwable ex) {
throw new RuntimeException(ex);
}
}
}
long elapsedSeconds = start.elapsed(TimeUnit.SECONDS);
INDEXING_PHASE_LOGGER.info("[TASK:PIPELINED-DUMP:END] Metrics: {}", MetricsFormatter.newBuilder()
.add("duration", FormattingUtils.formatToSeconds(elapsedSeconds))
.add("durationSeconds", elapsedSeconds)
.add("nodeStateEntriesExtracted", nodeStateEntriesExtracted)
.build());
indexingReporter.addTiming("Build FFS (Dump+Merge)", FormattingUtils.formatToSeconds(elapsedSeconds));
// Unique heading to make it easier to find in the logs
LOG.info(threadMonitor.printStatistics("Final Thread/Memory report"));
LOG.info("Documents filtered: docsFiltered: {}, longPathsFiltered: {}, filteredRenditionsTotal (top 10): {}",
documentFilter.getSkippedFields(), documentFilter.getLongPathSkipped(), documentFilter.formatTopK(10));
LOG.info("[INDEXING_REPORT:BUILD_FFS]\n{}", indexingReporter.generateReport());
} catch (Throwable e) {
INDEXING_PHASE_LOGGER.info("[TASK:PIPELINED-DUMP:FAIL] Metrics: {}, Error: {}",
MetricsFormatter.createMetricsWithDurationOnly(start), e.toString()
);
LOG.warn("Error dumping from MongoDB. Cancelling all tasks. Error: {}", e.toString());
// Cancel in order
cancelFuture(downloadFuture);
for (Future<?> transformTask : transformFutures) {
cancelFuture(transformTask);
}
cancelFuture(sortBatchFuture);
cancelFuture(mergeSortFuture);
throw new RuntimeException(e);
}
return flatFileStore.toFile();
} finally {
LOG.info("Shutting down build FFS thread pool");
new ExecutorCloser(threadPool).close();
}
}
private void cancelFuture(Future<?> future) {
if (future != null) {
LOG.info("Cancelling future: {}", future);
future.cancel(true);
}
}
    /**
     * @return the total number of node state entries produced by the transform
     *         tasks during {@link #createSortedStoreFile()}; 0 before it runs
     */
    @Override
    public long getEntryCount() {
        return nodeStateEntriesExtracted;
    }
}
|
apache/poi | 36,193 | poi-ooxml/src/main/java/org/apache/poi/xslf/usermodel/XSLFTextParagraph.java | /* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.xslf.usermodel;
import java.awt.Color;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import org.apache.poi.ooxml.util.POIXMLUnits;
import org.apache.poi.sl.draw.DrawPaint;
import org.apache.poi.sl.usermodel.AutoNumberingScheme;
import org.apache.poi.sl.usermodel.PaintStyle;
import org.apache.poi.sl.usermodel.PaintStyle.SolidPaint;
import org.apache.poi.sl.usermodel.TabStop.TabStopType;
import org.apache.poi.sl.usermodel.TextParagraph;
import org.apache.poi.util.Beta;
import org.apache.poi.util.Internal;
import org.apache.poi.util.Units;
import org.apache.poi.xslf.model.ParagraphPropertyFetcher;
import org.apache.poi.xslf.model.ParagraphPropertyFetcher.ParaPropFetcher;
import org.apache.xmlbeans.XmlCursor;
import org.apache.xmlbeans.XmlObject;
import org.openxmlformats.schemas.drawingml.x2006.main.*;
import org.openxmlformats.schemas.presentationml.x2006.main.CTPlaceholder;
import org.openxmlformats.schemas.presentationml.x2006.main.STPlaceholderType;
import static org.apache.poi.xssf.usermodel.XSSFRelation.NS_PRESENTATIONML;
/**
* Represents a paragraph of text within the containing text body.
* The paragraph is the highest level text separation mechanism.
*
* @since POI-3.8
*/
@Beta
public class XSLFTextParagraph implements TextParagraph<XSLFShape,XSLFTextParagraph,XSLFTextRun> {
    // Backing XMLBeans object for this paragraph (the <a:p> element).
    private final CTTextParagraph _p;
    // Cached run wrappers, kept in sync with the child elements of _p.
    private final List<XSLFTextRun> _runs;
    // The text shape that contains this paragraph.
    private final XSLFTextShape _shape;

    // A no-argument, no-result callback.
    @FunctionalInterface
    private interface Procedure {
        void accept();
    }
    /**
     * Wraps an existing {@code <a:p>} element, creating a run wrapper for each
     * child line break, regular text run, or text field, in document order.
     */
    XSLFTextParagraph(CTTextParagraph p, XSLFTextShape shape) {
        _p = p;
        _runs = new ArrayList<>();
        _shape = shape;

        // Walk the direct children of <a:p> with a cursor; the cursor is
        // released automatically by try-with-resources.
        try (XmlCursor c = _p.newCursor()) {
            if (c.toFirstChild()) {
                do {
                    XmlObject r = c.getObject();
                    if (r instanceof CTTextLineBreak) {
                        _runs.add(new XSLFLineBreak((CTTextLineBreak)r, this));
                    } else if (r instanceof CTRegularTextRun || r instanceof CTTextField) {
                        _runs.add(newTextRun(r));
                    }
                } while (c.toNextSibling());
            }
        }
    }
public String getText() {
StringBuilder out = new StringBuilder();
for (XSLFTextRun r : _runs) {
out.append(r.getRawText());
}
return out.toString();
}
    /**
     * @return the underlying {@code <a:p>} XMLBeans object; for internal use only
     */
    @Internal
    public CTTextParagraph getXmlObject() {
        return _p;
    }
    /**
     * @return the text shape that contains this paragraph
     */
    @Override
    public XSLFTextShape getParentShape() {
        return _shape;
    }
    /**
     * @return an unmodifiable view of the runs of this paragraph
     */
    @Override
    public List<XSLFTextRun> getTextRuns() {
        return Collections.unmodifiableList(_runs);
    }
    /**
     * @return an iterator over the runs of this paragraph (read-only view)
     */
    @Override
    public Iterator<XSLFTextRun> iterator() {
        return getTextRuns().iterator();
    }
    /**
     * Add a new run of text
     *
     * @return a new run of text
     */
    public XSLFTextRun addNewTextRun() {
        CTRegularTextRun r = _p.addNewR();
        CTTextCharacterProperties rPr = r.addNewRPr();
        // default language for new runs
        rPr.setLang("en-US");
        XSLFTextRun run = newTextRun(r);
        _runs.add(run);
        return run;
    }
/**
* Remove a text run
*
* @param textRun a run of text
* @return whether the run was removed
* @since POI 5.2.2
*/
public boolean removeTextRun(XSLFTextRun textRun) {
if (_runs.remove(textRun)) {
XmlObject xo = textRun.getXmlObject();
if (xo instanceof CTRegularTextRun) {
for (int i = 0; i < getXmlObject().sizeOfRArray(); i++) {
if (getXmlObject().getRArray(i).equals(xo)) {
getXmlObject().removeR(i);
return true;
}
}
} else if (xo instanceof CTTextField) {
for (int i = 0; i < getXmlObject().sizeOfFldArray(); i++) {
if (getXmlObject().getFldArray(i).equals(xo)) {
getXmlObject().removeFld(i);
return true;
}
}
} else if (xo instanceof CTTextLineBreak) {
for (int i = 0; i < getXmlObject().sizeOfBrArray(); i++) {
if (getXmlObject().getBrArray(i).equals(xo)) {
getXmlObject().removeBr(i);
return true;
}
}
}
return false;
}
return false;
}
    /**
     * Insert a line break
     *
     * @return text run representing this line break ('\n')
     */
    @SuppressWarnings("WeakerAccess")
    public XSLFTextRun addLineBreak() {
        XSLFLineBreak run = new XSLFLineBreak(_p.addNewBr(), this);
        CTTextCharacterProperties brProps = run.getRPr(true);
        if (!_runs.isEmpty()) {
            // by default line break has the font size of the last text run
            CTTextCharacterProperties prevRun = _runs.get(_runs.size() - 1).getRPr(true);
            brProps.set(prevRun);
            // don't copy hlink properties
            if (brProps.isSetHlinkClick()) {
                brProps.unsetHlinkClick();
            }
            if (brProps.isSetHlinkMouseOver()) {
                brProps.unsetHlinkMouseOver();
            }
        }
        _runs.add(run);
        return run;
    }
@Override
public TextAlign getTextAlign() {
return fetchParagraphProperty((props,val) -> {
if (props.isSetAlgn()) {
val.accept(TextAlign.values()[props.getAlgn().intValue() - 1]);
}
});
}
@Override
public void setTextAlign(TextAlign align) {
CTTextParagraphProperties pr = _p.isSetPPr() ? _p.getPPr() : _p.addNewPPr();
if(align == null) {
if(pr.isSetAlgn()) {
pr.unsetAlgn();
}
} else {
pr.setAlgn(STTextAlignType.Enum.forInt(align.ordinal() + 1));
}
}
@Override
public FontAlign getFontAlign() {
return fetchParagraphProperty((props,val) -> {
if (props.isSetFontAlgn()) {
val.accept(FontAlign.values()[props.getFontAlgn().intValue() - 1]);
}
});
}
/**
* Specifies the font alignment that is to be applied to the paragraph.
* Possible values for this include auto, top, center, baseline and bottom.
* see {@link org.apache.poi.sl.usermodel.TextParagraph.FontAlign}.
*
* @param align font align
*/
@SuppressWarnings("unused")
public void setFontAlign(FontAlign align) {
CTTextParagraphProperties pr = _p.isSetPPr() ? _p.getPPr() : _p.addNewPPr();
if(align == null) {
if(pr.isSetFontAlgn()) {
pr.unsetFontAlgn();
}
} else {
pr.setFontAlgn(STTextFontAlignType.Enum.forInt(align.ordinal() + 1));
}
}
/**
* @return the font to be used on bullet characters within a given paragraph
*/
@SuppressWarnings("WeakerAccess")
public String getBulletFont() {
return fetchParagraphProperty((props, val) -> {
if (props.isSetBuFont()) {
val.accept(props.getBuFont().getTypeface());
}
});
}
@SuppressWarnings("WeakerAccess")
public void setBulletFont(String typeface) {
CTTextParagraphProperties pr = _p.isSetPPr() ? _p.getPPr() : _p.addNewPPr();
CTTextFont font = pr.isSetBuFont() ? pr.getBuFont() : pr.addNewBuFont();
font.setTypeface(typeface);
}
/**
* @return the character to be used in place of the standard bullet point
*/
@SuppressWarnings("WeakerAccess")
public String getBulletCharacter() {
return fetchParagraphProperty((props, val) -> {
if (props.isSetBuChar()) {
val.accept(props.getBuChar().getChar());
}
});
}
@SuppressWarnings("WeakerAccess")
public void setBulletCharacter(String str) {
CTTextParagraphProperties pr = _p.isSetPPr() ? _p.getPPr() : _p.addNewPPr();
CTTextCharBullet c = pr.isSetBuChar() ? pr.getBuChar() : pr.addNewBuChar();
c.setChar(str);
}
/**
*
* @return the color of bullet characters within a given paragraph.
* A <code>null</code> value means to use the text font color.
*/
@SuppressWarnings("WeakerAccess")
public PaintStyle getBulletFontColor() {
Color col = fetchParagraphProperty(this::fetchBulletFontColor);
return (col == null) ? null : DrawPaint.createSolidPaint(col);
}
    // Reads the explicit bullet color (<a:buClr>) if present, resolving any
    // theme color reference against the sheet's theme.
    private void fetchBulletFontColor(CTTextParagraphProperties props, Consumer<Color> val) {
        final XSLFSheet sheet = getParentShape().getSheet();
        final XSLFTheme theme = sheet.getTheme();
        if(props.isSetBuClr()) {
            XSLFColor c = new XSLFColor(props.getBuClr(), theme, null, sheet);
            val.accept(c.getColor());
        }
    }
    // Convenience overload: wraps the AWT color in a solid paint.
    @SuppressWarnings("WeakerAccess")
    public void setBulletFontColor(Color color) {
        setBulletFontColor(DrawPaint.createSolidPaint(color));
    }
    /**
     * Set the color to be used on bullet characters within a given paragraph.
     *
     * @param color the bullet color
     * @throws IllegalArgumentException if {@code color} is not a {@link SolidPaint}
     */
    @SuppressWarnings("WeakerAccess")
    public void setBulletFontColor(PaintStyle color) {
        if (!(color instanceof SolidPaint)) {
            throw new IllegalArgumentException("Currently XSLF only supports SolidPaint");
        }

        // TODO: implement setting bullet color to null
        SolidPaint sp = (SolidPaint)color;
        Color col = DrawPaint.applyColorTransform(sp.getSolidColor());
        // Materialize <a:pPr>/<a:buClr>/<a:srgbClr> as needed and write the RGB bytes.
        CTTextParagraphProperties pr = _p.isSetPPr() ? _p.getPPr() : _p.addNewPPr();
        CTColor c = pr.isSetBuClr() ? pr.getBuClr() : pr.addNewBuClr();
        CTSRgbColor clr = c.isSetSrgbClr() ? c.getSrgbClr() : c.addNewSrgbClr();
        clr.setVal(new byte[]{(byte) col.getRed(), (byte) col.getGreen(), (byte) col.getBlue()});
    }
    /**
     * Returns the bullet size that is to be used within a paragraph.
     * This may be specified in two different ways, percentage spacing and font point spacing:
     * <p>
     * If bulletSize &gt;= 0, then bulletSize is a percentage of the font size.
     * If bulletSize &lt; 0, then it specifies the size in points
     * </p>
     *
     * @return the bullet size
     */
    @SuppressWarnings("WeakerAccess")
    public Double getBulletFontSize() {
        return fetchParagraphProperty(XSLFTextParagraph::fetchBulletFontSize);
    }
    // Reports the bullet size: percentage values are positive, point values are
    // encoded as negative numbers (see getBulletFontSize()).
    // NOTE(review): if both buSzPct and buSzPts are set, the points value is
    // accepted last and wins — presumably the two are mutually exclusive in
    // valid documents; confirm against the OOXML schema.
    private static void fetchBulletFontSize(CTTextParagraphProperties props, Consumer<Double> val) {
        if(props.isSetBuSzPct()) {
            val.accept(POIXMLUnits.parsePercent(props.getBuSzPct().xgetVal()) * 0.001);
        }
        if(props.isSetBuSzPts()) {
            val.accept( - props.getBuSzPts().getVal() * 0.01);
        }
    }
    /**
     * Sets the bullet size that is to be used within a paragraph.
     * This may be specified in two different ways, percentage spacing and font point spacing:
     * <p>
     * If bulletSize &gt;= 0, then bulletSize is a percentage of the font size.
     * If bulletSize &lt; 0, then it specifies the size in points
     * </p>
     */
    @SuppressWarnings("WeakerAccess")
    public void setBulletFontSize(double bulletSize) {
        CTTextParagraphProperties pr = _p.isSetPPr() ? _p.getPPr() : _p.addNewPPr();

        if(bulletSize >= 0) {
            // percentage of the font size; stored as thousandths of a percent
            CTTextBulletSizePercent pt = pr.isSetBuSzPct() ? pr.getBuSzPct() : pr.addNewBuSzPct();
            pt.setVal(Integer.toString((int)(bulletSize*1000)));
            // unset points if percentage is now set
            if(pr.isSetBuSzPts()) {
                pr.unsetBuSzPts();
            }
        } else {
            // negative input means size in points; stored as hundredths of a point
            CTTextBulletSizePoint pt = pr.isSetBuSzPts() ? pr.getBuSzPts() : pr.addNewBuSzPts();
            pt.setVal((int)(-bulletSize*100));
            // unset percentage if points is now set
            if(pr.isSetBuSzPct()) {
                pr.unsetBuSzPct();
            }
        }
    }
    /**
     * @return the auto numbering scheme, or null if not defined
     */
    @SuppressWarnings("WeakerAccess")
    public AutoNumberingScheme getAutoNumberingScheme() {
        return fetchParagraphProperty(XSLFTextParagraph::fetchAutoNumberingScheme);
    }
    // Maps the OOXML buAutoNum type id to an AutoNumberingScheme; unknown ids
    // (forOoxmlID returns null) are skipped so the fetcher keeps searching.
    private static void fetchAutoNumberingScheme(CTTextParagraphProperties props, Consumer<AutoNumberingScheme> val) {
        if (props.isSetBuAutoNum()) {
            AutoNumberingScheme ans = AutoNumberingScheme.forOoxmlID(props.getBuAutoNum().getType().intValue());
            if (ans != null) {
                val.accept(ans);
            }
        }
    }
/**
* @return the auto numbering starting number, or null if not defined
*/
@SuppressWarnings("WeakerAccess")
public Integer getAutoNumberingStartAt() {
return fetchParagraphProperty((props, val) -> {
if (props.isSetBuAutoNum() && props.getBuAutoNum().isSetStartAt()) {
val.accept(props.getBuAutoNum().getStartAt());
}
});
}
@Override
public void setIndent(Double indent) {
if ((indent == null) && !_p.isSetPPr()) {
return;
}
CTTextParagraphProperties pr = _p.isSetPPr() ? _p.getPPr() : _p.addNewPPr();
if(indent == null) {
if(pr.isSetIndent()) {
pr.unsetIndent();
}
} else {
pr.setIndent(Units.toEMU(indent));
}
}
@Override
public Double getIndent() {
return fetchParagraphProperty((props, val) -> {
if (props.isSetIndent()) {
val.accept(Units.toPoints(props.getIndent()));
}
});
}
@Override
public void setLeftMargin(Double leftMargin) {
if (leftMargin == null && !_p.isSetPPr()) {
return;
}
CTTextParagraphProperties pr = _p.isSetPPr() ? _p.getPPr() : _p.addNewPPr();
if (leftMargin == null) {
if(pr.isSetMarL()) {
pr.unsetMarL();
}
} else {
pr.setMarL(Units.toEMU(leftMargin));
}
}
/**
 * @return the left margin (in points) of the paragraph, null if unset
 */
@Override
public Double getLeftMargin() {
    return fetchParagraphProperty((props, val) -> {
        if (!props.isSetMarL()) {
            return;
        }
        // marL is stored in EMUs, exposed in points
        val.accept(Units.toPoints(props.getMarL()));
    });
}
/**
 * Sets the right margin in points; {@code null} removes an explicit setting.
 */
@Override
public void setRightMargin(Double rightMargin) {
    // nothing to clear when the paragraph has no properties element yet
    if (rightMargin == null && !_p.isSetPPr()) {
        return;
    }
    final CTTextParagraphProperties pr = _p.isSetPPr() ? _p.getPPr() : _p.addNewPPr();
    if (rightMargin != null) {
        // stored as an EMU length in the marR attribute
        pr.setMarR(Units.toEMU(rightMargin));
    } else if (pr.isSetMarR()) {
        pr.unsetMarR();
    }
}
/**
 *
 * @return the right margin of the paragraph, null if unset
 */
@Override
public Double getRightMargin() {
    return fetchParagraphProperty((props, val) -> {
        if (!props.isSetMarR()) {
            return;
        }
        // marR is stored in EMUs, exposed in points
        val.accept(Units.toPoints(props.getMarR()));
    });
}
/**
 * @return the default tab size in points, or {@code null} if not defined
 *         anywhere in the property hierarchy.
 */
@Override
public Double getDefaultTabSize() {
    return fetchParagraphProperty((props, val) -> {
        if (!props.isSetDefTabSz()) {
            return;
        }
        // defTabSz is a universal-measure length; normalize to points
        val.accept(Units.toPoints(POIXMLUnits.parseLength(props.xgetDefTabSz())));
    });
}
/**
 * @param idx zero-based index into the paragraph's tab-stop list
 * @return the position (in points) of the tab stop, or 0 if it is not defined
 */
@SuppressWarnings("WeakerAccess")
public double getTabStop(final int idx) {
    final Double pos = fetchParagraphProperty((props, val) -> fetchTabStop(idx, props, val));
    return (pos != null) ? pos : 0.;
}
// Reports the position of the idx-th tab stop, when a tab-stop list exists
// and contains enough entries. Positions are EMU lengths, exposed as points.
private static void fetchTabStop(final int idx, CTTextParagraphProperties props, Consumer<Double> val) {
    if (!props.isSetTabLst()) {
        return;
    }
    final CTTextTabStopList tabStops = props.getTabLst();
    if (idx < tabStops.sizeOfTabArray()) {
        final CTTextTabStop ts = tabStops.getTabArray(idx);
        val.accept(Units.toPoints(POIXMLUnits.parseLength(ts.xgetPos())));
    }
}
/**
 * Appends a tab stop at the given position (in points), creating the
 * paragraph properties and tab-stop list elements on demand.
 */
@SuppressWarnings("WeakerAccess")
public void addTabStop(double value) {
    final CTTextParagraphProperties pr = _p.isSetPPr() ? _p.getPPr() : _p.addNewPPr();
    final CTTextTabStopList tabStops = pr.isSetTabLst() ? pr.getTabLst() : pr.addNewTabLst();
    final CTTextTabStop tab = tabStops.addNewTab();
    tab.setPos(Units.toEMU(value));
}
/**
 * Sets the line spacing: values {@code >= 0} are a percentage, negative values
 * are absolute points, {@code null} removes the setting (see {@code setSpacing}).
 */
@Override
public void setLineSpacing(Double lineSpacing) {
    setSpacing(lineSpacing,
            props -> props::getLnSpc,
            props -> props::addNewLnSpc,
            props -> props::unsetLnSpc);
}
/**
 * @return the line spacing: positive values are a percentage (possibly scaled
 *         down by the shape's normal-autofit line-spacing reduction), negative
 *         values are points, {@code null} if unset.
 */
@Override
public Double getLineSpacing() {
    final Double lnSpc = getSpacing(props -> props::getLnSpc);
    if (lnSpc != null && lnSpc > 0) {
        // check if the percentage value is scaled
        // NOTE(review): getTextBodyPr() can return null for shapes without a
        // bodyPr element — guard before dereferencing to avoid an NPE.
        final XSLFTextShape shape = getParentShape();
        if (shape.getTextBodyPr() != null) {
            final CTTextNormalAutofit normAutofit = shape.getTextBodyPr().getNormAutofit();
            if (normAutofit != null) {
                final double scale = 1 - POIXMLUnits.parsePercent(normAutofit.xgetLnSpcReduction()) / 100_000.;
                return lnSpc * scale;
            }
        }
    }
    return lnSpc;
}
/**
 * Sets the spacing before the paragraph: {@code >= 0} is a percentage, negative
 * values are points, {@code null} removes the setting (see {@code setSpacing}).
 */
@Override
public void setSpaceBefore(Double spaceBefore) {
    setSpacing(spaceBefore,
            props -> props::getSpcBef,
            props -> props::addNewSpcBef,
            props -> props::unsetSpcBef);
}
/**
 * @return the spacing before the paragraph: positive means percentage,
 *         negative means points, {@code null} if unset (see {@code fetchSpacing}).
 */
@Override
public Double getSpaceBefore() {
    return getSpacing(props -> () -> props.getSpcBef());
}
/**
 * Sets the spacing after the paragraph: {@code >= 0} is a percentage, negative
 * values are points, {@code null} removes the setting (see {@code setSpacing}).
 */
@Override
public void setSpaceAfter(Double spaceAfter) {
    setSpacing(spaceAfter,
            props -> props::getSpcAft,
            props -> props::addNewSpcAft,
            props -> props::unsetSpcAft);
}
/**
 * @return the spacing after the paragraph: positive means percentage,
 *         negative means points, {@code null} if unset (see {@code fetchSpacing}).
 */
@Override
public Double getSpaceAfter() {
    return getSpacing(props -> () -> props.getSpcAft());
}
/**
 * Shared implementation for line spacing / space-before / space-after.
 *
 * <p>Sign convention: {@code space >= 0} is stored as a percentage
 * (spcPct, in 1000ths of a percent), {@code space < 0} is stored as points
 * (spcPts, in 100ths of a point), {@code null} removes the element.
 *
 * @param space    the value to store, or {@code null} to unset
 * @param getSpc   reads the spacing element (lnSpc/spcBef/spcAft) from the props
 * @param addSpc   creates the spacing element on the props
 * @param unsetSpc removes the spacing element from the props
 */
private void setSpacing(final Double space,
    final Function<CTTextParagraphProperties,Supplier<CTTextSpacing>> getSpc,
    final Function<CTTextParagraphProperties,Supplier<CTTextSpacing>> addSpc,
    final Function<CTTextParagraphProperties,Procedure> unsetSpc
) {
    // Only create a pPr element when we actually have a value to store;
    // for null input with no pPr, getPPr() returns null and we bail out below.
    final CTTextParagraphProperties pPr = (space == null || _p.isSetPPr()) ? _p.getPPr() : _p.addNewPPr();
    if (pPr == null) {
        return;
    }
    CTTextSpacing spc = getSpc.apply(pPr).get();
    if (space == null) {
        if (spc != null) {
            // unset the space before on null input
            unsetSpc.apply(pPr).accept();
        }
        return;
    }
    if (spc == null) {
        spc = addSpc.apply(pPr).get();
    }
    if (space >= 0) {
        // percentage: remove any point-based value first, then store in 1000ths of a percent
        if (spc.isSetSpcPts()) {
            spc.unsetSpcPts();
        }
        final CTTextSpacingPercent pct = spc.isSetSpcPct() ? spc.getSpcPct() : spc.addNewSpcPct();
        pct.setVal((int)(space*1000));
    } else {
        // points: remove any percentage value first, then store in 100ths of a point
        if (spc.isSetSpcPct()) {
            spc.unsetSpcPct();
        }
        final CTTextSpacingPoint pts = spc.isSetSpcPts() ? spc.getSpcPts() : spc.addNewSpcPts();
        pts.setVal((int)(-space*100));
    }
}
// Resolves a spacing value (lnSpc/spcBef/spcAft) through the property
// hierarchy using the generic paragraph property fetcher.
private Double getSpacing(final Function<CTTextParagraphProperties,Supplier<CTTextSpacing>> getSpc) {
    return fetchParagraphProperty((props, val) -> fetchSpacing(getSpc, props, val));
}
// Converts a CTTextSpacing into the API's double convention:
// percentages come back positive, point values come back negative.
private static void fetchSpacing(final Function<CTTextParagraphProperties,Supplier<CTTextSpacing>> getSpc,
                                 CTTextParagraphProperties props, Consumer<Double> val) {
    final CTTextSpacing spc = getSpc.apply(props).get();
    if (spc == null) {
        return;
    }
    if (spc.isSetSpcPct()) {
        // stored in 1000ths of a percent
        val.accept(POIXMLUnits.parsePercent(spc.getSpcPct().xgetVal()) * 0.001);
    } else if (spc.isSetSpcPts()) {
        // stored in 100ths of a point; negated as the "points" marker
        val.accept(-spc.getSpcPts().getVal() * 0.01);
    }
}
/**
 * Sets the indentation (outline) level of this paragraph.
 */
@Override
public void setIndentLevel(int level) {
    // create the paragraph properties on demand, then store the level
    (_p.isSetPPr() ? _p.getPPr() : _p.addNewPPr()).setLvl(level);
}
/**
 * @return the indentation (outline) level, defaulting to 0 when not set.
 */
@Override
public int getIndentLevel() {
    final CTTextParagraphProperties pr = _p.getPPr();
    if (pr != null && pr.isSetLvl()) {
        return pr.getLvl();
    }
    return 0;
}
/**
 * Returns whether this paragraph has bullets
 */
public boolean isBullet() {
    // null (nothing defined anywhere in the hierarchy) counts as "no bullet"
    return Boolean.TRUE.equals(fetchParagraphProperty(XSLFTextParagraph::fetchIsBullet));
}
// Decides the bullet state from one level of paragraph properties:
// an explicit buNone wins over any bullet font/character definition.
private static void fetchIsBullet(CTTextParagraphProperties props, Consumer<Boolean> val) {
    if (props.isSetBuNone()) {
        val.accept(false);
        return;
    }
    if (props.isSetBuFont() || props.isSetBuChar()) {
        val.accept(true);
    }
}
/**
 *
 * @param flag whether text in this paragraph has bullets
 */
public void setBullet(boolean flag) {
    if (isBullet() == flag) {
        return;
    }
    CTTextParagraphProperties pr = _p.isSetPPr() ? _p.getPPr() : _p.addNewPPr();
    if (flag) {
        // An explicit buNone would shadow the bullet definition (fetchIsBullet
        // checks buNone first), so remove it before enabling the default bullet.
        if (pr.isSetBuNone()) {
            pr.unsetBuNone();
        }
        pr.addNewBuFont().setTypeface("Arial");
        pr.addNewBuChar().setChar("\u2022");
    } else {
        // Remove every bullet-related element, then mark the paragraph
        // explicitly as bullet-free via buNone.
        if (pr.isSetBuFont()) {
            pr.unsetBuFont();
        }
        if (pr.isSetBuChar()) {
            pr.unsetBuChar();
        }
        if (pr.isSetBuAutoNum()) {
            pr.unsetBuAutoNum();
        }
        if (pr.isSetBuBlip()) {
            pr.unsetBuBlip();
        }
        if (pr.isSetBuClr()) {
            pr.unsetBuClr();
        }
        if (pr.isSetBuClrTx()) {
            pr.unsetBuClrTx();
        }
        if (pr.isSetBuFontTx()) {
            pr.unsetBuFontTx();
        }
        if (pr.isSetBuSzPct()) {
            pr.unsetBuSzPct();
        }
        if (pr.isSetBuSzPts()) {
            pr.unsetBuSzPts();
        }
        if (pr.isSetBuSzTx()) {
            pr.unsetBuSzTx();
        }
        pr.addNewBuNone();
    }
}
/**
 * Specifies that automatic numbered bullet points should be applied to this paragraph
 *
 * @param scheme type of auto-numbering
 * @param startAt the starting number for a given sequence of automatically
 *        numbered bullets (1-based).
 * @throws IllegalArgumentException if {@code startAt} is less than 1
 */
@SuppressWarnings("WeakerAccess")
public void setBulletAutoNumber(AutoNumberingScheme scheme, int startAt) {
    if (startAt < 1) {
        // fixed message grammar ("that 1" -> "to 1") and stray space before ';'
        throw new IllegalArgumentException("Start Number must be greater or equal to 1");
    }
    CTTextParagraphProperties pr = _p.isSetPPr() ? _p.getPPr() : _p.addNewPPr();
    CTTextAutonumberBullet lst = pr.isSetBuAutoNum() ? pr.getBuAutoNum() : pr.addNewBuAutoNum();
    lst.setType(STTextAutonumberScheme.Enum.forInt(scheme.ooxmlId));
    lst.setStartAt(startAt);
}
/**
 * @return a debug representation: the runtime class in brackets, followed by the text.
 */
@Override
public String toString() {
    return String.format("[%s]%s", getClass(), getText());
}
/**
 * @return master style text paragraph properties, or <code>null</code> if
 * there are no master slides or the master slides do not contain a text paragraph
 */
@Internal
public CTTextParagraphProperties getDefaultMasterStyle() {
    CTPlaceholder ph = _shape.getPlaceholderDetails().getCTPlaceholder(false);
    String defaultStyleSelector;
    // Pick the txStyles child element that matches this placeholder type.
    switch(ph == null ? -1 : ph.getType().intValue()) {
        case STPlaceholderType.INT_TITLE:
        case STPlaceholderType.INT_CTR_TITLE:
            defaultStyleSelector = "titleStyle";
            break;
        case -1: // no placeholder means plain text box
        case STPlaceholderType.INT_FTR:
        case STPlaceholderType.INT_SLD_NUM:
        case STPlaceholderType.INT_DT:
            defaultStyleSelector = "otherStyle";
            break;
        default:
            defaultStyleSelector = "bodyStyle";
            break;
    }
    int level = getIndentLevel();
    // wind up and find the root master sheet which must be slide master
    final String nsPML = NS_PRESENTATIONML;
    XSLFSheet masterSheet = _shape.getSheet();
    for (XSLFSheet m = masterSheet; m != null; m = (XSLFSheet)m.getMasterSheet()) {
        masterSheet = m;
        XmlObject xo = masterSheet.getXmlObject();
        try (XmlCursor cur = xo.newCursor()) {
            // Remember the current position so we can retry with "notesStyle"
            // when the slide-master style path does not exist.
            cur.push();
            if ((cur.toChild(nsPML, "txStyles") && cur.toChild(nsPML, defaultStyleSelector)) ||
                (cur.pop() && cur.toChild(nsPML, "notesStyle"))) {
                // Look for lvl<N+1>pPr at this indent level, falling back to
                // lower levels until one is found.
                while (level >= 0) {
                    cur.push();
                    if (cur.toChild(XSLFRelation.NS_DRAWINGML, "lvl" +(level+1)+ "pPr")) {
                        return (CTTextParagraphProperties)cur.getObject();
                    }
                    cur.pop();
                    level--;
                }
            }
        }
    }
    return null;
}
// Resolves a paragraph property against this paragraph first, then the
// placeholder/master hierarchy, via the generic ParagraphPropertyFetcher.
private <T> T fetchParagraphProperty(ParaPropFetcher<T> fetcher) {
    return new ParagraphPropertyFetcher<>(this, fetcher).fetchProperty(getParentShape());
}
/**
 * Replaces the content of this paragraph with a copy of {@code other}:
 * clears all existing properties, runs, breaks and fields, copies the other
 * paragraph's runs, and then re-applies the high-level properties (alignment,
 * bullet settings, margins, spacing) that differ from the current values.
 */
void copy(XSLFTextParagraph other) {
    if (other == this) {
        return;
    }
    CTTextParagraph thisP = getXmlObject();
    CTTextParagraph otherP = other.getXmlObject();
    // wipe this paragraph's own properties and end-paragraph run properties
    if (thisP.isSetPPr()) {
        thisP.unsetPPr();
    }
    if (thisP.isSetEndParaRPr()) {
        thisP.unsetEndParaRPr();
    }
    // remove all existing runs, line breaks and fields (iterate backwards
    // because removal shifts the array indices)
    _runs.clear();
    for (int i=thisP.sizeOfBrArray(); i>0; i--) {
        thisP.removeBr(i-1);
    }
    for (int i=thisP.sizeOfRArray(); i>0; i--) {
        thisP.removeR(i-1);
    }
    for (int i=thisP.sizeOfFldArray(); i>0; i--) {
        thisP.removeFld(i-1);
    }
    // deep-copy the other paragraph's runs: clone the XML, then let the run
    // copy over the derived properties
    for (XSLFTextRun tr : other.getTextRuns()) {
        XmlObject xo = tr.getXmlObject().copy();
        XSLFTextRun run = addNewTextRun();
        run.getXmlObject().set(xo);
        run.copy(tr);
    }
    // set properties again, in case we are based on a different
    // template
    TextAlign srcAlign = other.getTextAlign();
    if(srcAlign != getTextAlign()) {
        setTextAlign(srcAlign);
    }
    boolean isBullet = other.isBullet();
    if(isBullet != isBullet()) {
        setBullet(isBullet);
        if(isBullet) {
            // copy bullet font/char/color/size only where they differ
            String buFont = other.getBulletFont();
            if(buFont != null && !buFont.equals(getBulletFont())) {
                setBulletFont(buFont);
            }
            String buChar = other.getBulletCharacter();
            if(buChar != null && !buChar.equals(getBulletCharacter())) {
                setBulletCharacter(buChar);
            }
            PaintStyle buColor = other.getBulletFontColor();
            if(buColor != null && !buColor.equals(getBulletFontColor())) {
                setBulletFontColor(buColor);
            }
            Double buSize = other.getBulletFontSize();
            if(doubleNotEquals(buSize, getBulletFontSize())) {
                setBulletFontSize(buSize);
            }
        }
    }
    // margins and spacing: null-safe comparisons via doubleNotEquals
    Double leftMargin = other.getLeftMargin();
    if (doubleNotEquals(leftMargin, getLeftMargin())) {
        setLeftMargin(leftMargin);
    }
    Double indent = other.getIndent();
    if (doubleNotEquals(indent, getIndent())) {
        setIndent(indent);
    }
    Double spaceAfter = other.getSpaceAfter();
    if (doubleNotEquals(spaceAfter, getSpaceAfter())) {
        setSpaceAfter(spaceAfter);
    }
    Double spaceBefore = other.getSpaceBefore();
    if (doubleNotEquals(spaceBefore, getSpaceBefore())) {
        setSpaceBefore(spaceBefore);
    }
    Double lineSpacing = other.getLineSpacing();
    if (doubleNotEquals(lineSpacing, getLineSpacing())) {
        setLineSpacing(lineSpacing);
    }
}
// Null-safe inequality for boxed doubles: two nulls compare as equal.
private static boolean doubleNotEquals(Double d1, Double d2) {
    return (d1 == null) ? (d2 != null) : !d1.equals(d2);
}
/**
 * @return the default font size in points: taken from the end-paragraph run
 *         properties, otherwise from the master style, otherwise 12.
 */
@Override
public Double getDefaultFontSize() {
    CTTextCharacterProperties endPr = _p.getEndParaRPr();
    if (endPr == null || !endPr.isSetSz()) {
        // inherit the font size from the master style
        CTTextParagraphProperties masterStyle = getDefaultMasterStyle();
        if (masterStyle != null) {
            endPr = masterStyle.getDefRPr();
        }
    }
    if (endPr != null && endPr.isSetSz()) {
        // size is stored in 100ths of a point
        return endPr.getSz() / 100.;
    }
    // hard-coded fallback when nothing is defined anywhere
    return 12.;
}
/**
 * @return the font family of the first run, or "Arial" when there are no runs
 *         or the first run defines no family.
 */
@Override
public String getDefaultFontFamily() {
    if (_runs.isEmpty()) {
        return "Arial";
    }
    String family = _runs.get(0).getFontFamily();
    return (family != null) ? family : "Arial";
}
/**
 * @return a view over this paragraph's bullet settings, or {@code null}
 *         when the paragraph has no bullets. All accessors delegate to the
 *         corresponding methods of the enclosing paragraph.
 */
@Override
public BulletStyle getBulletStyle() {
    if (!isBullet()) {
        return null;
    }
    return new BulletStyle() {
        @Override
        public String getBulletCharacter() {
            return XSLFTextParagraph.this.getBulletCharacter();
        }
        @Override
        public String getBulletFont() {
            return XSLFTextParagraph.this.getBulletFont();
        }
        @Override
        public Double getBulletFontSize() {
            return XSLFTextParagraph.this.getBulletFontSize();
        }
        @Override
        public PaintStyle getBulletFontColor() {
            return XSLFTextParagraph.this.getBulletFontColor();
        }
        @Override
        public void setBulletFontColor(Color color) {
            // wrap the AWT color into a solid paint and delegate to the PaintStyle overload
            setBulletFontColor(DrawPaint.createSolidPaint(color));
        }
        @Override
        public void setBulletFontColor(PaintStyle color) {
            XSLFTextParagraph.this.setBulletFontColor(color);
        }
        @Override
        public AutoNumberingScheme getAutoNumberingScheme() {
            return XSLFTextParagraph.this.getAutoNumberingScheme();
        }
        @Override
        public Integer getAutoNumberingStartAt() {
            return XSLFTextParagraph.this.getAutoNumberingStartAt();
        }
    };
}
/**
 * Configures the bullet style from a variable list of hints, dispatched by
 * runtime type: Number = font size, Color = font color, Character = bullet
 * character, String = bullet font, AutoNumberingScheme = numbering (start 1).
 * An empty argument list disables bullets.
 */
@Override
public void setBulletStyle(Object... styles) {
    if (styles.length == 0) {
        setBullet(false);
        return;
    }
    setBullet(true);
    for (Object style : styles) {
        if (style instanceof Number) {
            setBulletFontSize(((Number) style).doubleValue());
        } else if (style instanceof Color) {
            setBulletFontColor((Color) style);
        } else if (style instanceof Character) {
            setBulletCharacter(style.toString());
        } else if (style instanceof String) {
            setBulletFont((String) style);
        } else if (style instanceof AutoNumberingScheme) {
            setBulletAutoNumber((AutoNumberingScheme) style, 1);
        }
    }
}
/**
 * @return the tab stops defined in the property hierarchy, or {@code null}
 *         if no tab-stop list is found.
 */
@Override
public List<XSLFTabStop> getTabStops() {
    return fetchParagraphProperty((props, val) -> fetchTabStops(props, val));
}
// Wraps every CTTextTabStop of the tab-stop list into an XSLFTabStop and
// reports the resulting list; nothing is reported when no list is present.
private static void fetchTabStops(CTTextParagraphProperties props, Consumer<List<XSLFTabStop>> val) {
    if (!props.isSetTabLst()) {
        return;
    }
    final List<XSLFTabStop> tabs = new ArrayList<>();
    //noinspection deprecation
    for (final CTTextTabStop ta : props.getTabLst().getTabArray()) {
        tabs.add(new XSLFTabStop(ta));
    }
    val.accept(tabs);
}
/**
 * Appends a tab stop with the given position and type. On a slide master the
 * tab stop is stored in the master text styles, otherwise in this paragraph's
 * own properties (created on demand).
 */
@Override
public void addTabStops(double positionInPoints, TabStopType tabStopType) {
    final CTTextParagraphProperties tpp;
    if (getParentShape().getSheet() instanceof XSLFSlideMaster) {
        tpp = getDefaultMasterStyle();
    } else {
        final CTTextParagraph xo = getXmlObject();
        tpp = xo.isSetPPr() ? xo.getPPr() : xo.addNewPPr();
    }
    if (tpp == null) {
        // no master style paragraph properties available
        return;
    }
    final CTTextTabStopList stl = tpp.isSetTabLst() ? tpp.getTabLst() : tpp.addNewTabLst();
    final XSLFTabStop tab = new XSLFTabStop(stl.addNewTab());
    tab.setPositionInPoints(positionInPoints);
    tab.setType(tabStopType);
}
/**
 * Removes the tab-stop list — from the master text styles on a slide master,
 * otherwise from this paragraph's own properties.
 */
@Override
public void clearTabStops() {
    final boolean onMaster = getParentShape().getSheet() instanceof XSLFSlideMaster;
    final CTTextParagraphProperties tpp = onMaster ? getDefaultMasterStyle() : getXmlObject().getPPr();
    if (tpp != null && tpp.isSetTabLst()) {
        tpp.unsetTabLst();
    }
}
/**
 * Helper method for appending text and keeping paragraph and character properties.
 * The character properties are moved to the end paragraph marker
 */
/* package */ void clearButKeepProperties() {
    CTTextParagraph thisP = getXmlObject();
    // remove all line breaks and fields (iterate backwards because removal
    // shifts the array indices)
    for (int i=thisP.sizeOfBrArray(); i>0; i--) {
        thisP.removeBr(i-1);
    }
    for (int i=thisP.sizeOfFldArray(); i>0; i--) {
        thisP.removeFld(i-1);
    }
    if (!_runs.isEmpty()) {
        int size = _runs.size();
        // preserve the character properties of the last run by copying them
        // into the end-paragraph run properties (endParaRPr)
        XSLFTextRun lastRun = _runs.get(size-1);
        CTTextCharacterProperties cpOther = lastRun.getRPr(false);
        if (cpOther != null) {
            if (thisP.isSetEndParaRPr()) {
                thisP.unsetEndParaRPr();
            }
            CTTextCharacterProperties cp = thisP.addNewEndParaRPr();
            cp.set(cpOther);
        }
        // now drop all text runs
        for (int i=size; i>0; i--) {
            thisP.removeR(i-1);
        }
        _runs.clear();
    }
}
/**
 * @return whether this paragraph's shape is a header/footer placeholder
 *         (slide number, date, footer or header).
 */
@Override
public boolean isHeaderOrFooter() {
    CTPlaceholder ph = _shape.getPlaceholderDetails().getCTPlaceholder(false);
    if (ph == null) {
        // not a placeholder at all
        return false;
    }
    switch (ph.getType().intValue()) {
        case STPlaceholderType.INT_SLD_NUM:
        case STPlaceholderType.INT_DT:
        case STPlaceholderType.INT_FTR:
        case STPlaceholderType.INT_HDR:
            return true;
        default:
            return false;
    }
}
/**
 * Helper method to allow subclasses to provide their own text run
 *
 * @param r the xml reference
 *
 * @return a new text paragraph
 *
 * @since POI 3.15-beta2
 */
protected XSLFTextRun newTextRun(XmlObject r) {
    return new XSLFTextRun(r, this);
}
/**
 * Creates the run wrapper for a line-break element; overridable by subclasses.
 *
 * @param r the line break XML element
 * @return a new line-break run bound to this paragraph
 */
@SuppressWarnings("WeakerAccess")
protected XSLFTextRun newTextRun(CTTextLineBreak r) {
    return new XSLFLineBreak(r, this);
}
}
|
apache/ignite-3 | 36,163 | modules/network-annotation-processor/src/main/java/org/apache/ignite/internal/network/processor/messages/MessageImplGenerator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.network.processor.messages;
import static org.apache.ignite.internal.network.processor.MessageGeneratorUtils.BYTE_ARRAY_TYPE;
import static org.apache.ignite.internal.network.processor.MessageGeneratorUtils.addByteArrayPostfix;
import static org.apache.ignite.internal.network.processor.MessageGeneratorUtils.methodReturnsNullableValue;
import static org.apache.ignite.internal.network.processor.MessageGeneratorUtils.methodReturnsPrimitive;
import com.squareup.javapoet.AnnotationSpec;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.CodeBlock;
import com.squareup.javapoet.FieldSpec;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.MethodSpec.Builder;
import com.squareup.javapoet.ParameterizedTypeName;
import com.squareup.javapoet.TypeName;
import com.squareup.javapoet.TypeSpec;
import it.unimi.dsi.fastutil.ints.IntSet;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.type.ArrayType;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.MirroredTypeException;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.tools.Diagnostic;
import org.apache.ignite.internal.network.NetworkMessage;
import org.apache.ignite.internal.network.annotations.Marshallable;
import org.apache.ignite.internal.network.annotations.WithSetter;
import org.apache.ignite.internal.network.processor.MessageClass;
import org.apache.ignite.internal.network.processor.MessageGroupWrapper;
import org.apache.ignite.internal.network.processor.ProcessingException;
import org.apache.ignite.internal.network.processor.TypeUtils;
import org.apache.ignite.internal.network.serialization.MessageSerializer;
import org.apache.ignite.internal.tostring.IgniteStringifier;
import org.apache.ignite.internal.tostring.IgniteToStringExclude;
import org.apache.ignite.internal.tostring.IgniteToStringInclude;
import org.apache.ignite.internal.tostring.S;
/**
* Class for generating implementations of the {@link NetworkMessage} interfaces and their builders, generated by a {@link
* MessageBuilderGenerator}.
*/
public class MessageImplGenerator {
/** Processing environment. */
private final ProcessingEnvironment processingEnv;
/** Message group. */
private final MessageGroupWrapper messageGroup;
/** Utilities for comparing and classifying {@link TypeMirror}s. */
private final TypeUtils typeUtils;
/** Types for which a {@code @Marshallable} annotation is rejected. */
private final MarshallableTypesBlackList marshallableTypesBlackList;
/**
 * Constructor.
 *
 * @param processingEnv Processing environment.
 * @param messageGroup Message group.
 */
public MessageImplGenerator(ProcessingEnvironment processingEnv, MessageGroupWrapper messageGroup) {
    this.processingEnv = processingEnv;
    this.messageGroup = messageGroup;
    // derived helpers built on top of the processing environment
    typeUtils = new TypeUtils(processingEnv);
    marshallableTypesBlackList = new MarshallableTypesBlackList(typeUtils);
}
/**
 * Generates the implementation of a given Network Message interface and its Builder (as a nested class).
 *
 * @param message network message
 * @param builderInterface generated builder interface
 * @return {@code TypeSpec} of the generated message implementation
 */
public TypeSpec generateMessageImpl(MessageClass message, TypeSpec builderInterface) {
    ClassName messageImplClassName = message.implClassName();
    processingEnv.getMessager()
            .printMessage(Diagnostic.Kind.NOTE, "Generating " + messageImplClassName);
    List<ExecutableElement> getters = message.getters();
    var fields = new ArrayList<FieldSpec>(getters.size());
    var methodImpls = new ArrayList<MethodSpec>(getters.size());
    // collected per-field metadata, forwarded to the constructor and builder generators
    var notNullFieldNames = new HashSet<String>();
    var marshallableFieldNames = new HashSet<String>();
    // create a field and a getter implementation for every getter in the message interface
    for (ExecutableElement getter : getters) {
        TypeMirror getterType = getter.getReturnType();
        TypeName getterReturnType = TypeName.get(getterType);
        String getterName = getter.getSimpleName().toString();
        boolean isMarshallable = getter.getAnnotation(Marshallable.class) != null;
        // reject @Marshallable on types the blacklist says must not use it
        if (isMarshallable && !marshallableTypesBlackList.canBeMarshallable(getterType)) {
            String error = String.format(
                    "\"%s\" field is marked as @Marshallable but this type is either directly supported by native serialization "
                            + "or is prohibited by a blacklist, remove this annotation from the field",
                    getterName
            );
            throw new ProcessingException(error, null, getter);
        }
        FieldSpec.Builder fieldBuilder = FieldSpec.builder(getterReturnType, getterName)
                .addModifiers(Modifier.PRIVATE);
        // propagate toString-related annotations onto the generated field
        if (getter.getAnnotation(IgniteToStringExclude.class) == null) {
            IgniteToStringInclude includeAnnotation = getter.getAnnotation(IgniteToStringInclude.class);
            IgniteStringifier stringifierAnnotation = getter.getAnnotation(IgniteStringifier.class);
            if (stringifierAnnotation != null) {
                AnnotationSpec annotationSpec = AnnotationSpec.builder(IgniteStringifier.class)
                        .addMember("name", "$S", stringifierAnnotation.name())
                        .addMember("value", "$T.class", igniteStringifierValueTypeMirror(stringifierAnnotation))
                        .build();
                fieldBuilder.addAnnotation(annotationSpec);
            } else if (includeAnnotation != null) {
                fieldBuilder.addAnnotation(AnnotationSpec.get(includeAnnotation));
            } else {
                // included in toString by default
                fieldBuilder.addAnnotation(AnnotationSpec.builder(IgniteToStringInclude.class).build());
            }
        } else {
            fieldBuilder.addAnnotation(IgniteToStringExclude.class);
        }
        boolean generateSetter = getter.getAnnotation(WithSetter.class) != null;
        // marshallable fields and fields with setters are mutated after construction
        if (!isMarshallable && !generateSetter) {
            fieldBuilder.addModifiers(Modifier.FINAL);
        }
        if (requiresNotNullCheck(getter)) {
            notNullFieldNames.add(getterName);
        }
        FieldSpec field = fieldBuilder.build();
        fields.add(field);
        if (isMarshallable) {
            // companion byte-array field + getter holding the marshalled form
            marshallableFieldNames.add(getterName);
            String name = addByteArrayPostfix(getterName);
            FieldSpec marshallableFieldArray = FieldSpec.builder(BYTE_ARRAY_TYPE, name)
                    .addModifiers(Modifier.PRIVATE)
                    .build();
            fields.add(marshallableFieldArray);
            MethodSpec baGetterImpl = MethodSpec.methodBuilder(name)
                    .returns(BYTE_ARRAY_TYPE)
                    .addStatement("return $N", marshallableFieldArray)
                    .build();
            methodImpls.add(baGetterImpl);
        }
        if (generateSetter) {
            MethodSpec setterImpl = MethodSpec.methodBuilder(getterName)
                    .returns(TypeName.VOID)
                    .addModifiers(Modifier.PUBLIC)
                    .addParameter(getterReturnType, getterName)
                    .addAnnotation(Override.class)
                    .addStatement("this.$L = $L", getterName, getterName)
                    .build();
            methodImpls.add(setterImpl);
        }
        MethodSpec getterImpl = MethodSpec.overriding(getter)
                .addStatement("return $N", field)
                .build();
        methodImpls.add(getterImpl);
    }
    TypeSpec.Builder messageImpl = TypeSpec.classBuilder(messageImplClassName)
            .addModifiers(Modifier.PUBLIC)
            .addSuperinterface(message.className())
            .addSuperinterface(Cloneable.class)
            .addFields(fields)
            .addMethods(methodImpls)
            .addMethod(constructor(fields, notNullFieldNames, marshallableFieldNames));
    // serializer(): the generated serializer singleton, or null when not auto-serializable
    if (message.isAutoSerializable()) {
        messageImpl.addMethod(MethodSpec.methodBuilder("serializer")
                .returns(MessageSerializer.class)
                .addModifiers(Modifier.PUBLIC)
                .addAnnotation(Override.class)
                .addCode("return $T.INSTANCE;", message.serializerClassName())
                .build());
    } else {
        messageImpl.addMethod(MethodSpec.methodBuilder("serializer")
                .returns(MessageSerializer.class)
                .addModifiers(Modifier.PUBLIC)
                .addAnnotation(Override.class)
                .addCode("return null;")
                .build());
    }
    // group type constant and getter
    FieldSpec groupTypeField = FieldSpec.builder(short.class, "GROUP_TYPE")
            .addModifiers(Modifier.PUBLIC, Modifier.STATIC, Modifier.FINAL)
            .initializer("$L", messageGroup.groupType())
            .build();
    messageImpl.addField(groupTypeField);
    MethodSpec groupTypeMethod = MethodSpec.methodBuilder("groupType")
            .addAnnotation(Override.class)
            .addModifiers(Modifier.PUBLIC)
            .returns(short.class)
            .addStatement("return $N", groupTypeField)
            .build();
    messageImpl.addMethod(groupTypeMethod);
    // toString() delegates to the reflective S.toString helper
    MethodSpec toStringMethod = MethodSpec.methodBuilder("toString")
            .addAnnotation(Override.class)
            .addModifiers(Modifier.PUBLIC)
            .returns(String.class)
            .addStatement("return $T.toString($T.class, this)", S.class, messageImplClassName)
            .build();
    messageImpl.addMethod(toStringMethod);
    // message type constant and getter
    FieldSpec messageTypeField = FieldSpec.builder(short.class, "TYPE")
            .addModifiers(Modifier.PUBLIC, Modifier.STATIC, Modifier.FINAL)
            .initializer("$L", message.messageType())
            .build();
    messageImpl.addField(messageTypeField);
    MethodSpec messageTypeMethod = MethodSpec.methodBuilder("messageType")
            .addAnnotation(Override.class)
            .addModifiers(Modifier.PUBLIC)
            .returns(short.class)
            .addStatement("return $N", messageTypeField)
            .build();
    messageImpl.addMethod(messageTypeMethod);
    // equals and hashCode
    generateEqualsAndHashCode(messageImpl, message);
    // generate clone
    MethodSpec cloneMethod = MethodSpec.methodBuilder("clone")
            .addAnnotation(Override.class)
            .addModifiers(Modifier.PUBLIC)
            .returns(messageImplClassName)
            .addCode(CodeBlock.builder()
                    .beginControlFlow("try")
                    .addStatement("return ($T) super.clone()", messageImplClassName)
                    .endControlFlow()
                    .beginControlFlow("catch (CloneNotSupportedException e)")
                    .addStatement("// Never expected to be thrown because whole message class hierarchy implements clone()")
                    .addStatement("throw new AssertionError(e)")
                    .endControlFlow()
                    .build())
            .build();
    messageImpl.addMethod(cloneMethod);
    var builderName = ClassName.get(message.packageName(), builderInterface.name);
    // nested builder interface and static factory method
    TypeSpec builder = generateBuilderImpl(message, messageImplClassName, builderName, notNullFieldNames, marshallableFieldNames);
    messageImpl.addType(builder);
    MethodSpec builderMethod = MethodSpec.methodBuilder("builder")
            .addModifiers(Modifier.PUBLIC, Modifier.STATIC)
            .returns(builderName)
            .addStatement("return new $N()", builder)
            .build();
    messageImpl.addMethod(builderMethod);
    // optional prepareMarshal/unmarshal methods (added only when needed)
    generatePrepareMarshal(messageImpl, message);
    generateUnmarshalMethod(messageImpl, message);
    messageImpl
            .addOriginatingElement(message.element())
            .addOriginatingElement(messageGroup.element());
    return messageImpl.build();
}
/**
 * Resolves the type of an object to a category that may hold a message.
 *
 * @param parameterType Type.
 * @return {@link MaybeMessageType} wrapped in an {@link Optional}, or an empty
 *         optional if the type can't hold a message.
 */
private Optional<MaybeMessageType> resolveType(TypeMirror parameterType) {
    switch (parameterType.getKind()) {
        case ARRAY:
            // only arrays of reference types may contain messages
            TypeMirror componentType = ((ArrayType) parameterType).getComponentType();
            return componentType.getKind().isPrimitive()
                    ? Optional.empty()
                    : Optional.of(MaybeMessageType.OBJECT_ARRAY);
        case DECLARED:
            if (typeUtils.isSubType(parameterType, Collection.class)) {
                return Optional.of(MaybeMessageType.COLLECTION);
            }
            // NOTE(review): maps are matched by exact type rather than subtype — verify this is intentional
            if (typeUtils.isSameType(parameterType, Map.class)) {
                return Optional.of(MaybeMessageType.MAP);
            }
            if (typeUtils.isSubType(parameterType, NetworkMessage.class)) {
                return Optional.of(MaybeMessageType.MESSAGE);
            }
            return Optional.empty();
        default:
            return Optional.empty();
    }
}
/**
 * Adds a {@code prepareMarshal} method to the generated class — but only when
 * at least one field needs marshalling ({@code @Marshallable} fields or
 * container/message fields with nested messages).
 */
private void generatePrepareMarshal(TypeSpec.Builder messageImplBuild, MessageClass message) {
    // tracks whether the generated method would do any work at all
    boolean isNeeded = false;
    ClassName setType = ClassName.get(IntSet.class);
    Builder prepareMarshal = MethodSpec.methodBuilder("prepareMarshal")
            .addAnnotation(Override.class)
            .addModifiers(Modifier.PUBLIC)
            .addException(Exception.class)
            .addParameter(setType, "usedDescriptors")
            .addParameter(Object.class, "marshallerObj");
    // marshaller classes are referenced by name strings — presumably to avoid a
    // compile-time dependency on the serialization module (TODO confirm)
    String marshallerPackage = "org.apache.ignite.internal.network.serialization.marshal";
    ClassName marshallerClass = ClassName.get(marshallerPackage, "UserObjectMarshaller");
    ClassName marshalledObjectClass = ClassName.get(marshallerPackage, "MarshalledObject");
    prepareMarshal.addStatement("$T marshaller = ($T) marshallerObj", marshallerClass, marshallerClass);
    for (ExecutableElement executableElement : message.getters()) {
        TypeMirror type = executableElement.getReturnType();
        String objectName = executableElement.getSimpleName().toString();
        if (executableElement.getAnnotation(Marshallable.class) != null) {
            isNeeded = true;
            // marshal the field into its companion byte-array field and record the used descriptors
            String baName = addByteArrayPostfix(objectName);
            String moName = baName + "mo";
            prepareMarshal.addStatement("$T $N = marshaller.marshal($N)", marshalledObjectClass, moName, objectName);
            prepareMarshal.addStatement("usedDescriptors.addAll($N.usedDescriptorIds())", moName);
            prepareMarshal.addStatement("$N = $N.bytes()", baName, moName).addCode("\n");
        } else {
            // other fields may still contain nested messages that need preparing
            Optional<MaybeMessageType> objectType = resolveType(type);
            if (objectType.isEmpty()) {
                continue;
            }
            switch (objectType.get()) {
                case OBJECT_ARRAY:
                    isNeeded = generateObjectArrayHandler((ArrayType) type, prepareMarshal, objectName,
                            "prepareMarshal(usedDescriptors, marshaller)") || isNeeded;
                    break;
                case COLLECTION:
                    isNeeded = generateCollectionHandler((DeclaredType) type, prepareMarshal, objectName,
                            "prepareMarshal(usedDescriptors, marshaller)") || isNeeded;
                    break;
                case MESSAGE:
                    isNeeded = generateMessageHandler(prepareMarshal, objectName,
                            "prepareMarshal(usedDescriptors, marshaller)") || isNeeded;
                    break;
                case MAP:
                    isNeeded = generateMapHandler(prepareMarshal, (DeclaredType) type, objectName,
                            "prepareMarshal(usedDescriptors, marshaller)") || isNeeded;
                    break;
                default:
                    break;
            }
        }
    }
    if (isNeeded) {
        messageImplBuild.addMethod(prepareMarshal.build());
    }
}
/**
 * Adds an {@code unmarshal} method to the generated class — the inverse of
 * {@code prepareMarshal} — but only when at least one field needs it.
 */
private void generateUnmarshalMethod(TypeSpec.Builder messageImplBuild, MessageClass message) {
    // tracks whether the generated method would do any work at all
    boolean isNeeded = false;
    Builder unmarshal = MethodSpec.methodBuilder("unmarshal")
            .addAnnotation(Override.class)
            .addModifiers(Modifier.PUBLIC)
            .addException(Exception.class)
            .addParameter(Object.class, "marshallerObj")
            .addParameter(Object.class, "descriptorsObj");
    // marshaller class referenced by name string (see generatePrepareMarshal)
    String uosPackage = "org.apache.ignite.internal.network.serialization.marshal";
    ClassName marshallerClass = ClassName.get(uosPackage, "UserObjectMarshaller");
    unmarshal.addStatement("$T marshaller = ($T) marshallerObj", marshallerClass, marshallerClass);
    for (ExecutableElement executableElement : message.getters()) {
        TypeMirror type = executableElement.getReturnType();
        String objectName = executableElement.getSimpleName().toString();
        if (executableElement.getAnnotation(Marshallable.class) != null) {
            isNeeded = true;
            // restore the field from its byte-array companion, then release the bytes
            String baName = addByteArrayPostfix(objectName);
            unmarshal.addStatement("$N = marshaller.unmarshal($N, descriptorsObj)", objectName, baName);
            unmarshal.addStatement("$N = null", baName);
        } else {
            // other fields may contain nested messages that need unmarshalling
            Optional<MaybeMessageType> objectType = resolveType(type);
            if (objectType.isEmpty()) {
                continue;
            }
            switch (objectType.get()) {
                case OBJECT_ARRAY:
                    isNeeded = generateObjectArrayHandler((ArrayType) type, unmarshal, objectName,
                            "unmarshal(marshaller, descriptorsObj)") || isNeeded;
                    break;
                case COLLECTION:
                    isNeeded = generateCollectionHandler((DeclaredType) type, unmarshal, objectName,
                            "unmarshal(marshaller, descriptorsObj)") || isNeeded;
                    break;
                case MESSAGE:
                    isNeeded = generateMessageHandler(unmarshal, objectName,
                            "unmarshal(marshaller, descriptorsObj)") || isNeeded;
                    break;
                case MAP:
                    isNeeded = generateMapHandler(unmarshal, (DeclaredType) type, objectName,
                            "unmarshal(marshaller, descriptorsObj)") || isNeeded;
                    break;
                default:
                    break;
            }
        }
    }
    if (isNeeded) {
        messageImplBuild.addMethod(unmarshal.build());
    }
}
/**
 * Generates code applying {@code code} to every element of an array-typed field, if the
 * array's component type is a network message.
 *
 * @param type array type of the field.
 * @param methodBuilder builder of the generated method to append statements to.
 * @param objectName name of the field being processed.
 * @param code method invocation (without receiver) to apply to each non-null element.
 * @return {@code true} if handling code was generated, {@code false} otherwise.
 */
private boolean generateObjectArrayHandler(ArrayType type, Builder methodBuilder, String objectName, String code) {
    TypeMirror elementType = type.getComponentType();
    // Only arrays whose elements are network messages need recursive handling.
    if (!typeUtils.isSubType(elementType, NetworkMessage.class)) {
        return false;
    }
    methodBuilder
            .beginControlFlow("if ($N != null)", objectName)
            .beginControlFlow("for ($T obj : $N)", elementType, objectName)
            .addStatement("if (obj != null) obj." + code)
            .endControlFlow()
            .endControlFlow()
            .addCode("\n");
    return true;
}
/**
 * Generates code applying {@code code} to every element of a collection-typed field, if the
 * collection's element type is a network message.
 *
 * @param type declared collection type of the field.
 * @param methodBuilder builder of the generated method to append statements to.
 * @param objectName name of the field being processed.
 * @param code method invocation (without receiver) to apply to each non-null element.
 * @return {@code true} if handling code was generated, {@code false} otherwise.
 */
private boolean generateCollectionHandler(DeclaredType type, Builder methodBuilder, String objectName, String code) {
    TypeMirror elementType = type.getTypeArguments().get(0);
    if (typeUtils.isSubType(elementType, NetworkMessage.class)) {
        methodBuilder.beginControlFlow("if ($N != null)", objectName);
        methodBuilder.beginControlFlow("for ($T obj : $N)", elementType, objectName);
        // Fix: do not pass "objectName" as a format argument here. The format string contains
        // no placeholders, so the extra argument was dead (silently ignored by JavaPoet), and
        // would cause an exception if "code" ever contained a '$' placeholder. This now matches
        // generateObjectArrayHandler().
        methodBuilder.addStatement("if (obj != null) obj." + code);
        methodBuilder.endControlFlow();
        methodBuilder.endControlFlow().addCode("\n");
        return true;
    }
    return false;
}
/**
 * Generates code applying {@code code} directly to a message-typed field, guarded by a null
 * check. Always generates code, so always returns {@code true}.
 *
 * @param methodBuilder builder of the generated method to append the statement to.
 * @param objectName name of the message-typed field.
 * @param code method invocation (without receiver) to apply to the field.
 * @return always {@code true}.
 */
private boolean generateMessageHandler(Builder methodBuilder, String objectName, String code) {
    String statementFormat = "if ($N != null) $N." + code;
    methodBuilder.addStatement(statementFormat, objectName, objectName);
    return true;
}
/**
 * Generates code applying {@code code} to the keys and/or values of a map-typed field, for
 * whichever of the key and value types is a network message. Does nothing if neither type
 * argument is a message.
 *
 * @param methodBuilder builder of the generated method to append statements to.
 * @param type declared map type of the field (two type arguments: key and value).
 * @param objectName name of the field being processed.
 * @param code method invocation (without receiver) to apply to each non-null key/value.
 * @return {@code true} if handling code was generated, {@code false} otherwise.
 */
private boolean generateMapHandler(Builder methodBuilder, DeclaredType type, String objectName, String code) {
    TypeMirror keyType = type.getTypeArguments().get(0);
    boolean keyIsMessage = typeUtils.isSubType(keyType, NetworkMessage.class);
    TypeMirror valueType = type.getTypeArguments().get(1);
    boolean valueIsMessage = typeUtils.isSubType(valueType, NetworkMessage.class);
    if (keyIsMessage || valueIsMessage) {
        // Iterate over the entry set so keys and values are visited in a single pass.
        ParameterizedTypeName entryType = ParameterizedTypeName.get(
                ClassName.get(Entry.class),
                TypeName.get(keyType),
                TypeName.get(valueType)
        );
        ParameterizedTypeName entrySetType = ParameterizedTypeName.get(ClassName.get(Set.class), entryType);
        String entrySetName = objectName + "EntrySet";
        methodBuilder.beginControlFlow("if ($N != null)", objectName);
        methodBuilder.addStatement("$T $N = $N.entrySet()", entrySetType, entrySetName, objectName);
        methodBuilder.beginControlFlow("for ($T entry : $N)", entryType, entrySetName);
        methodBuilder.addStatement("$T key = entry.getKey()", keyType);
        methodBuilder.addStatement("$T value = entry.getValue()", valueType);
        // Only emit the recursive call for the side(s) that are actually messages.
        if (keyIsMessage) {
            methodBuilder.addStatement("if (key != null) key." + code);
        }
        if (valueIsMessage) {
            methodBuilder.addStatement("if (value != null) value." + code);
        }
        methodBuilder.endControlFlow();
        methodBuilder.endControlFlow().addCode("\n");
        return true;
    }
    return false;
}
/**
 * Generates implementations of {@link #hashCode} and {@link #equals} for the provided {@code message} and adds them to the provided
 * builder.
 *
 * <p>Object fields are compared with {@code Objects.equals}, arrays with {@code Arrays.equals},
 * and primitives with {@code ==} (except float/double, which use {@code compare} to handle NaN).
 */
private static void generateEqualsAndHashCode(TypeSpec.Builder messageImplBuilder, MessageClass message) {
    MethodSpec.Builder equals = MethodSpec.methodBuilder("equals")
            .addAnnotation(Override.class)
            .addModifiers(Modifier.PUBLIC)
            .returns(boolean.class)
            .addParameter(Object.class, "other")
            // Standard identity short-circuit.
            .addCode(CodeBlock.builder()
                    .beginControlFlow("if (this == other)")
                    .addStatement("return true")
                    .endControlFlow()
                    .build())
            // Exact-class comparison (not instanceof): implementations are final generated classes.
            .addCode(CodeBlock.builder()
                    .beginControlFlow("if (other == null || getClass() != other.getClass())")
                    .addStatement("return false")
                    .endControlFlow()
                    .build());
    MethodSpec.Builder hashCode = MethodSpec.methodBuilder("hashCode")
            .addAnnotation(Override.class)
            .addModifiers(Modifier.PUBLIC)
            .returns(int.class);
    // Degenerate case: no fields at all. All instances are equal and share a constant hash.
    if (message.getters().isEmpty()) {
        equals.addStatement("return true");
        hashCode.addStatement("return $T.class.hashCode()", message.implClassName());
        messageImplBuilder
                .addMethod(equals.build())
                .addMethod(hashCode.build());
        return;
    }
    // Partition fields by how they must be compared/hashed.
    var arrays = new ArrayList<ExecutableElement>();
    var primitives = new ArrayList<ExecutableElement>();
    var others = new ArrayList<ExecutableElement>();
    for (ExecutableElement element : message.getters()) {
        TypeKind typeKind = element.getReturnType().getKind();
        if (typeKind.isPrimitive()) {
            primitives.add(element);
        } else if (typeKind == TypeKind.ARRAY) {
            arrays.add(element);
        } else {
            others.add(element);
        }
    }
    // Build one big "return a && b && ..." expression; "first" tracks whether a "&&"
    // separator is needed before the next term.
    CodeBlock.Builder comparisonStatement = CodeBlock.builder().add("return ");
    boolean first = true;
    // objects are compared using "Objects.equals"
    for (ExecutableElement other : others) {
        if (first) {
            first = false;
        } else {
            comparisonStatement.add(" && ");
        }
        String fieldName = other.getSimpleName().toString();
        comparisonStatement.add("$T.equals(this.$L, otherMessage.$L)", Objects.class, fieldName, fieldName);
    }
    // arrays are compared using "Arrays.equals"
    for (ExecutableElement array : arrays) {
        if (first) {
            first = false;
        } else {
            comparisonStatement.add(" && ");
        }
        String fieldName = array.getSimpleName().toString();
        comparisonStatement.add("$T.equals(this.$L, otherMessage.$L)", Arrays.class, fieldName, fieldName);
    }
    // primitives are compared using "==", except for floating point values (because of NaNs and stuff)
    for (ExecutableElement primitive : primitives) {
        if (first) {
            first = false;
        } else {
            comparisonStatement.add(" && ");
        }
        String fieldName = primitive.getSimpleName().toString();
        switch (primitive.getReturnType().getKind()) {
            case FLOAT:
                comparisonStatement.add("$T.compare(this.$L, otherMessage.$L) == 0", Float.class, fieldName, fieldName);
                break;
            case DOUBLE:
                comparisonStatement.add("$T.compare(this.$L, otherMessage.$L) == 0", Double.class, fieldName, fieldName);
                break;
            default:
                comparisonStatement.add("this.$L == otherMessage.$L", fieldName, fieldName);
                break;
        }
    }
    // The cast is safe: getClass() equality was already checked above.
    equals
            .addStatement("var otherMessage = ($T)other", message.implClassName())
            .addStatement(comparisonStatement.build());
    hashCode
            .addStatement("int result = 0");
    // primitives can be boxed and used in "Objects.hash"
    String objectHashCode = Stream.concat(primitives.stream(), others.stream())
            .map(element -> "this." + element.getSimpleName())
            .collect(Collectors.joining(", ", "result = $T.hash(", ")"));
    if (!objectHashCode.isEmpty()) {
        hashCode.addStatement(objectHashCode, Objects.class);
    }
    // Arrays need content-based hashing, folded in with the usual 31-multiplier scheme.
    for (ExecutableElement array : arrays) {
        hashCode.addStatement("result = 31 * result + $T.hashCode(this.$L)", Arrays.class, array.getSimpleName());
    }
    hashCode.addStatement("return result");
    messageImplBuilder
            .addMethod(equals.build())
            .addMethod(hashCode.build());
}
/**
 * Creates a constructor for the current Network Message implementation.
 *
 * <p>For fields that are both non-nullable and marshallable, the raw field and its byte-array
 * companion travel as a pair, so the generated constructor rejects the state where both are
 * null instead of null-checking either one individually.
 */
private static MethodSpec constructor(List<FieldSpec> fields, Set<String> notNullFieldNames, Set<String> marshallableFieldNames) {
    MethodSpec.Builder ctor = MethodSpec.constructorBuilder()
            .addModifiers(Modifier.PRIVATE);
    for (FieldSpec field : fields) {
        String fieldName = field.name;
        boolean needsPairedNullCheck =
                notNullFieldNames.contains(fieldName) && marshallableFieldNames.contains(fieldName);
        if (needsPairedNullCheck) {
            ctor.addCode(CodeBlock.builder()
                    .beginControlFlow("if ($L == null && $L == null)", fieldName, addByteArrayPostfix(fieldName))
                    .addStatement("throw new $T($S)", NullPointerException.class, fieldName + " is not marked @Nullable")
                    .endControlFlow()
                    .build());
        }
        ctor.addParameter(field.type, fieldName)
                .addStatement("this.$N = $N", field, field);
    }
    return ctor.build();
}
/**
 * Generates a nested static class that implements the Builder interface, generated during previous steps.
 *
 * <p>For every getter on the message interface, the builder gets a backing field, a fluent
 * setter, and a getter. {@code @Marshallable} fields additionally get a byte-array companion
 * field with its own setter/getter pair for carrying the serialized form.
 *
 * @param message descriptor of the message interface being processed.
 * @param messageImplClass name of the generated message implementation class.
 * @param builderName name of the previously generated Builder interface.
 * @param notNullFieldNames names of fields that must not be set to null.
 * @param marshallableFieldNames names of fields annotated with {@code @Marshallable}.
 */
private static TypeSpec generateBuilderImpl(
        MessageClass message, ClassName messageImplClass, ClassName builderName, Set<String> notNullFieldNames,
        Set<String> marshallableFieldNames) {
    List<ExecutableElement> messageGetters = message.getters();
    var fields = new ArrayList<FieldSpec>(messageGetters.size());
    var setters = new ArrayList<MethodSpec>(messageGetters.size());
    var getters = new ArrayList<MethodSpec>(messageGetters.size());
    for (ExecutableElement messageGetter : messageGetters) {
        boolean isMarshallable = messageGetter.getAnnotation(Marshallable.class) != null;
        var getterReturnType = TypeName.get(messageGetter.getReturnType());
        String getterName = messageGetter.getSimpleName().toString();
        // Backing field for the builder, named after the getter.
        FieldSpec field = FieldSpec.builder(getterReturnType, getterName)
                .addModifiers(Modifier.PRIVATE).build();
        fields.add(field);
        // Fluent setter: validates non-null fields eagerly, then returns "this" for chaining.
        Builder setterBuilder = MethodSpec.methodBuilder(getterName)
                .addAnnotation(Override.class)
                .addModifiers(Modifier.PUBLIC)
                .returns(builderName)
                .addParameter(getterReturnType, getterName);
        if (notNullFieldNames.contains(getterName)) {
            setterBuilder.addStatement("$T.requireNonNull($L, $S)", Objects.class, getterName, getterName + " is not marked @Nullable");
        }
        setterBuilder.addStatement("this.$N = $L", field, getterName)
                .addStatement("return this");
        MethodSpec setter = setterBuilder.build();
        setters.add(setter);
        MethodSpec getter = MethodSpec.methodBuilder(getterName)
                .addAnnotation(Override.class)
                .addModifiers(Modifier.PUBLIC)
                .returns(getterReturnType)
                .addStatement("return $N", field)
                .build();
        getters.add(getter);
        // @Marshallable fields also get a byte-array companion (serialized form) with its own
        // accessor pair; no null validation applies to the companion.
        if (isMarshallable) {
            String name = addByteArrayPostfix(getterName);
            FieldSpec baField = FieldSpec.builder(BYTE_ARRAY_TYPE, name)
                    .addModifiers(Modifier.PRIVATE)
                    .build();
            fields.add(baField);
            MethodSpec baSetter = MethodSpec.methodBuilder(name)
                    .addAnnotation(Override.class)
                    .addModifiers(Modifier.PUBLIC)
                    .returns(builderName)
                    .addParameter(BYTE_ARRAY_TYPE, name)
                    .addStatement("this.$N = $L", baField, name)
                    .addStatement("return this")
                    .build();
            setters.add(baSetter);
            MethodSpec baGetter = MethodSpec.methodBuilder(name)
                    .addAnnotation(Override.class)
                    .addModifiers(Modifier.PUBLIC)
                    .returns(BYTE_ARRAY_TYPE)
                    .addStatement("return $N", baField)
                    .build();
            getters.add(baGetter);
        }
    }
    return TypeSpec.classBuilder("Builder")
            .addModifiers(Modifier.PRIVATE, Modifier.STATIC)
            .addSuperinterface(builderName)
            .addFields(fields)
            .addMethods(setters)
            .addMethods(getters)
            .addMethod(buildMethod(message, messageImplClass, fields, notNullFieldNames, marshallableFieldNames))
            .build();
}
/**
 * Generates the {@code build()} method for the Builder interface implementation.
 *
 * <p>The generated method simply invokes the private constructor of the implementation class
 * with every builder field as an argument. Non-nullable, non-marshallable fields are wrapped in
 * {@code Objects.requireNonNull} at the call site (marshallable fields are validated inside the
 * constructor instead, where the byte-array companion can be taken into account).
 */
private static MethodSpec buildMethod(MessageClass message, ClassName messageImplClass, List<FieldSpec> fields,
        Set<String> notNullFieldNames, Set<String> marshallableFieldNames) {
    var constructorArgs = new ArrayList<CodeBlock>(fields.size());
    for (FieldSpec field : fields) {
        String fieldName = field.name;
        boolean checkHere = notNullFieldNames.contains(fieldName) && !marshallableFieldNames.contains(fieldName);
        if (checkHere) {
            constructorArgs.add(
                    CodeBlock.of("$T.requireNonNull($L, $S)", Objects.class, fieldName, fieldName + " is not marked @Nullable"));
        } else {
            constructorArgs.add(CodeBlock.of("$L", fieldName));
        }
    }
    CodeBlock body = CodeBlock.builder()
            .add("return new $T", messageImplClass)
            .add("(")
            .add(CodeBlock.join(constructorArgs, ", "))
            .add(")")
            .build();
    return MethodSpec.methodBuilder("build")
            .addAnnotation(Override.class)
            .addModifiers(Modifier.PUBLIC)
            .returns(message.className())
            .addStatement(body)
            .build();
}
/**
 * Returns {@code true} if the given getter's field requires a null check: it is exempt only
 * when the getter returns a primitive or is explicitly marked nullable.
 */
private static boolean requiresNotNullCheck(ExecutableElement el) {
    boolean exempt = methodReturnsPrimitive(el) || methodReturnsNullableValue(el);
    return !exempt;
}
/** Types that may hold network message. */
private enum MaybeMessageType {
    /** Object array whose component type may be a network message. */
    OBJECT_ARRAY,
    /** Collection whose element type may be a network message. */
    COLLECTION,
    /** Field that is itself a network message. */
    MESSAGE,
    /** Map whose key and/or value type may be a network message. */
    MAP;
}
/**
 * Extracts the {@link TypeMirror} of the {@code value()} member of an {@code IgniteStringifier}
 * annotation.
 *
 * <p>Class-valued annotation members cannot be read directly at annotation-processing time,
 * because the referenced class may not have been compiled yet. Invoking the member deliberately
 * triggers a {@link MirroredTypeException}, which carries the corresponding type mirror.
 */
private static TypeMirror igniteStringifierValueTypeMirror(IgniteStringifier annotation) {
    try {
        annotation.value();
        // Should not happen: reading a Class-valued member inside an annotation processor
        // always throws MirroredTypeException.
        return null;
    } catch (MirroredTypeException mte) {
        return mte.getTypeMirror();
    }
}
}
|
google/s2-geometry-library-java | 34,954 | library/src/com/google/common/geometry/S2ShapeTracker.java | /*
* Copyright 2024 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.geometry;
import static com.google.common.geometry.S2Cell.Boundary.BOTTOM_EDGE;
import static com.google.common.geometry.S2Cell.Boundary.LEFT_EDGE;
import static com.google.common.geometry.S2Cell.Boundary.RIGHT_EDGE;
import static com.google.common.geometry.S2Cell.Boundary.TOP_EDGE;
import static java.lang.Math.ceil;
import static java.lang.Math.floor;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.lang.Math.round;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.geometry.S2Cell.Boundary;
import com.google.common.geometry.S2RobustCellClipper.Crossing;
import com.google.common.geometry.S2RobustCellClipper.CrossingType;
import com.google.common.geometry.primitives.CountingBitset;
import it.unimi.dsi.fastutil.longs.Long2IntMap;
import it.unimi.dsi.fastutil.longs.Long2IntOpenHashMap;
import java.util.List;
import java.util.function.BiFunction;
import org.jspecify.annotations.Nullable;
/**
* A class for tracking a shape in an S2ShapeIndex and detecting when all the pieces of the shape
* have been visited.
*
* <p>This works by tracking segments of S2Cell boundaries in IJ coordinates that are contained by a
* shape. These segments must always match up across cell boundaries. By "toggling" segments on and
* off as we visit them, we can arrange for them all to cancel once we have seen all the interior
* pieces of a shape.
*
* <p>Once we have seen every chain at least once, and there are no outstanding boundary segments
* contained by the shape, then we have visited all of the shape.
*
* <p>1. Usage
*
* <pre>
* Instantiate an S2ShapeTracker instance and initialize it with an S2Shape:
* {@code
* S2ShapeTracker tracker = new S2ShapeTracker(shape);
* } or {@code
* tracker.reset(shape);
* }
*
* Then, as you iterate through an index, mark individual chains as seen when you come across them:
*
* {@snippet :
* tracker.markChain(chain);
* }
*
* This method is cheap to call (O(1) complexity) and idempotent so it's safe to call multiple times
* per chain.
*
* <p>Use S2RobustCellClipper to generate boundary crossings for the current cell and process them
* with shape tracker:
*
* {@snippet :
* S2RobustCellClipper clipper = new S2RobustCellClipper();
*
* clipper.startCell(cell);
* for (each edge from shape in cell) {
* clipper.clipEdge(edge);
* }
*
* tracker.processCrossings(cell, clipper.getCrossings());
* }
*
* <p>If the cell has no edges but is contained by the shape, use addCellBoundary() to account for
* the entire boundary of the cell in the tracker:
*
* {@snippet :
* tracker.addCellBoundary(cell);
* }
*
 * The current state of the shape can be queried via the finished() method:
*
* {@snippet :
* if (tracker.finished()) {
* ... report results
* }
* }
*
* <p>The tracker can then be reused for a new shape by calling reset().
*
* <p>The rest of this documentation covers the theory and details behind interior tracking. If you
* just want to get started tracking shapes, feel free to skip.
*
* <p>2. Theory
*
* <p>For bulk queries in general, and containment in particular, we need to know when we've seen
* all of a shape so that we know when it's safe to make a final decision regarding the shape and
* return results to the user.
*
* <p>Since shape edges can span multiple cells, it's not sufficient to just track that we've seen
* all the edges in a shape. If we imagine a shape with one long edge that crosses many cells, the
* first cell we visit we will mark the edge as seen and report results. But the edge continued
* outside the cell and we may have missed information relevant to the query result.
*
* <p>The same reasoning applies to 2D shapes. Imagine a ring circling the Earth around the equator.
* If we create another shape by reversing the order of the first, the two shapes won't be contained
* even though they have the same exact vertices. The interior of each escapes the interior of the
* other.
*
* <p>But, importantly, we have no way to know this directly, since all the vertices are coincident,
* none of the vertices will test as outside the shapes nor will edges cross.
*
* <p>This interior "escapage" is only detectable by knowing that there is "more interior" pending
* when we finish processing all the edges.
*
* <p>By tracking portions of the cell boundary that are contained by a shape and removing them when
* we visit the opposite side of the boundary, we can track what pieces of a shape are still
* unresolved.
*
* <p>Multi-points are trivial, they can't cross a cell boundary (or any edge) and are each their
* own chain so we merely have to check that we've seen all the chains in the shape.
*
* <p>Polylines are slightly more complex because they can cross a boundary at a point. These
* crossings are 0 dimensional however so we only have to match crossings and don't have to worry
* about extents.
*
* <p>Finally, polygons can contain a 1 dimensional segment of the cell boundary, so we have to
* track both the presence of a crossing and its extent.
*
* <p>It's entirely legitimate (and common) for us to have a large cell bounded by smaller ones, so
* we may see a segment of a boundary that is then visited on its opposite side in smaller pieces
* (or vice-versa):
*
* <pre>
* ┌─────┬─┐ ┌───────┐ ┌───────┐ ┌───────┐
* │ ├─┘ → │ ┌─┤ → │ │ → │ │
* │ │ │ ├─┘ │ ┌─┤ │ │
* └─────┘ └─────┘ └─────┴─┘ └───────┘
* </pre>
*
* <p>Thus, we need a way to track boundary intervals that also allows us to build and cancel in
* small pieces. We can't merely store an interval (A,B) and expect to cancel it in a single piece
* with another interval (B,A).
*
* <p>2.1 The Problem
*
* <p>Given a grid of non-overlapping cells (which any S2ShapeIndex is), we'd like to devise a
* system where we can track boundary segments efficiently. We know a few things that we can use to
* our advantage:
*
* <ol>
* <li>If we're destined to see all of a shape, then any time it crosses a cell boundary, we will
* always see both sides of the boundary due to the cell padding we use when building indices.
* <li>We will always see exactly the same crossing points in cells on either side of the boundary
* due to the exactness in UVEdgeClipper.
* <li>In IJ space, cells become axis-aligned squares with their boundary segments lying along
* constant I or J values.
* </ol>
*
* <p>Since we can rely on seeing the same section of cell boundary twice the problem is one of
* pairing and canceling matching segments.
*
* <p>2.2 The Closure Grid
*
 * <p>Towards this goal, let's note that we can build a consistent notion of sign along cell
* boundaries. By consistent, we mean that we can assign every boundary segment of every S2 cell a
* sign value (either +1 or -1) that is opposite across the boundary.
*
* <p>Cells have consistently numbered edges, ordered CCW around the boundary, numbered 0 to 3. If
* we define edges 0 and 1 to be positive and edges 2 and 3 to be negative, then we can see that if
* we tessellate the cells the sign values are equal-and-opposite across boundaries:
*
* <pre>
* ┌───────┐ ┌───────┐ ┌───────┬───────┐
* │ 2 │ │ - │ │ - │ - │
* │3 1│ → │- +│ → │- +│- +│
* │ 0 │ │ + │ │ + │ + │
* └───────┘ └───────┘ ├───────┼───────┤
* │ - │ - │
* │- +│- +│
* │ + │ + │
* └───────┴───────┘
* </pre>
*
* <p>Because cell orientations on a given face are constant, this holds true even when there are
* cells of different sizes on each side of the boundary. One side will always have the positive
* boundaries and the other side the negative.
*
* <p>This works automatically within a given face, but is it also true between faces? The naive
* answer is "almost". Let's look at the face cells with sign values assigned as described above:
*
* <pre>
* ┌─────────────────────┐
* │ ┌───────┬───────┐
* │ │J - │I - │
* │ ┌╴ │- 4 +│- 5 +│ ┐
* │ │ + I│ + J│ │
* │ ┌───────┼───────┼───────┘ │
* │ │J - │I - │ │
* │ ┌╴ │- 2 +│- 3 +│ ╶┘ │
* │ │ + I│ + J│ │
* │ ┌───────┼───────┼───────┘ │
* │ │J - │I - │ │
* └ │- 0 +│- 1 +│ ╶┘ │
* │ + I│ + J│ │
* └───────┴───────┘ │
* └─────────────────────┘
* </pre>
*
* <p>Touching face edges that have the same sign (and are thus incompatible) are connected by
* lines. Interestingly all the "interior" face boundaries in the diagram are consistent.
*
* <p>The edge of a face at a maximum IJ coordinate (S2Projections.LIMIT_IJ == 2^30) spatially
* aliases the edge of the adjacent face at value 0. We need to break this tie so that intervals
* along the boundaries between faces are canceled properly.
*
* <p>The exact choice in breaking ties doesn't matter as long as it's consistent, so we take the
* convention that maximum values alias back down to the minimum value on the adjacent face and we
* do our interval accounting there. This fits nicely with the mental model of faces being "half
* open" and not containing their upper boundaries.
*
* <p>As an example, the maximum J coordinate on face 0 ends up being stored using the minimum I
* coordinate of face 2, and we flip the values of the intervals so that they match the coordinate
* system on face two.
*
* <p>Looking at the diagram above, all of our incompatible face edges are at a maximum J value.
* Since we have to wrap those boundaries anyways, while we're doing that, we can negate the sign as
* well and we now have a fully consistent signed boundary system.
*
* <p>2.3 Summing Interval Lengths
*
* <p>Now that we have a sign convention that naturally cancels across boundaries, we could track
* polygon closure by finding segments of the grid that are contained (by finding cell boundary
* crossings), and summing their lengths with appropriate signs. Since we'll see both sides of a
* boundary, we'll always add a given length and subtract it out later. The lengths naturally
* cancel.
*
* <p>To do this, take a section of boundary that contains one or more crossings:
*
* <pre>
* ←
* •────────•━━━━•───────────•━━━━•─────────────•━━━━━━━━━━━━•
* X A B C D E Y
* →
* </pre>
*
* <p>When we're traversing left-to-right, the first contained interval we'll see is (A,B). (B,C) is
* not contained so we ignore it, then we take (C,D) and (E,Y).
*
* <p>We'll maintain a sum, and for each contained interval we'll add the first point and subtract
* the second point:
*
* <pre>
* sum += A-B + C-D + E-Y
* </pre>
*
* <p>Now, when we traverse the opposite side, we'll proceed right-to-left, and we'll see the
* intervals with their vertices swapped: (Y,E), (D,C), and (B,A).
*
* <p>We'll still add the first, and subtract the second:
*
* <pre>
* sum += Y-E + D-C + B-A
* </pre>
*
* <p>If we look at this together with the first sum:
*
* <pre>
* sum = A-B + C-D + E-Y + Y-E + D-C + B-A = 0
* </pre>
*
* <p>Each crossing point has been canceled out exactly, leaving a zero total.
*
* <p>Note that this left-to-right and right-to-left ordering is a natural consequence of cells all
* being CCW oriented. Cells with adjacent boundaries will traverse that boundary in opposite
* directions:
*
* <pre>
* ┌───────┬───────┐
* │ ← │ ← │
* │↓ ↑│↓ ↑│
* │ → │ → │
* ├───────┼───────┤
* │ ← │ ← │
* │↓ ↑│↓ ↑│
* │ → │ → │
* └───────┴───────┘
* </pre>
*
* <p>This summation also works naturally for splitting up larger intervals, imagine a large cell
* with a piece of its boundary contained, bordered by smaller cells:
*
* <pre>
*
* smaller cells │ 0 │ 1 │ 2 │ 3
* │ → B → C → D → │
* ├─────•━━┷━━━━━━━━┷━━━━━━━━┷━━━━•───┤
* large cell │ X ← Y │
* </pre>
*
* Moving right-to-left first, we'll add (Y,X) first:
*
* <pre>
* total = Y-X
* </pre>
*
* Now, we process left-to-right (we can process the cells in any order, let's assume 1,2,0,3):
*
* <pre>
* cell1 - total = Y-X + B-C
* cell2 - total = Y-X + B-C + C-D = Y-X + B-D
* cell0 - total = Y-X + B-D + X-B = Y-D
* cell3 - total = Y-D + D-Y = 0
* </pre>
*
* The values close, and the same is true in any order, let's do 3,2,1,0:
*
* <pre>
* cell3 - total = Y-X + D-Y = D-X
* cell2 - total = D-X + C-D = C-X
* cell1 - total = C-X + B-C = B-X
* cell0 - total = B-X + X-B = 0
* </pre>
*
* <p>In this example we can see more clearly how we "cancel" pieces of the Y-X interval one at a
* time, but we're always left with the remaining interval.
*
* <p>2.3.1 General Iteration Order Safety
*
* <p>The attentive reader might worry that we might accidentally have a crossing of, say, length 2,
* and pair it with two unrelated length 1 crossings on the other side of the boundary, and get a
* net zero without actually closing all the intervals. We need to ensure we can't get a false
* positive for a given shape.
*
* <p>First, let's assume all of our points are distinct. This is easy enough to arrange in IJ
* coordinates, since (face, i, j) specifies a unique point, and cells that are used in an
* S2ShapeIndex do not overlap.
*
* <p>If we look at a closure sum:
*
* <pre>
* (A-B) + (B-C) + (C-A) = 0
* </pre>
*
* <p>Note the general pattern is that we add an element, and then eventually subtract its exact
* value again to cancel it out. If our points are distinct, we don't have to rely on actually
* summing them together, we can just track which points we've accumulated with a set, inserting and
* removing them as we go.
*
* <p>We can implicitly track an IJ interval (A,B) by storing its endpoints.
*
* <p>It's possible to see multiple of the same point, e.g. if you had multiple crossings within the
* same IJ coordinate. To support this, we'll use a hashmap of points mapping to a count.
*
* <p>Our final method for tracking closure becomes simply:
*
* <pre>
* hashmap[point]++
* </pre>
*
* <p>And subtracting a point:
*
* <pre>
* hashmap[point]--
* </pre>
*
* <p>With zero being the default value for a new entry. If the count goes to zero in either case,
* we remove the point. When the set is empty, we have an exact closure.
*
* <p>2.4 Rounding intervals
*
* <p>We have to be careful with very small crossings. It's entirely possible for a shape to cross a
* cell boundary with a width less than one unit in IJ space:
*
* <pre>
* ╲ ╱
* ┌─────┬─• •┬─────┬─────┐
* ╲╱
* </pre>
*
* <p>If we round these intervals naively, we can end up with a zero-length interval in IJ space
* with equal endpoints. That would result in an interval (A,A) which we would add as A-A = 0, which
* wouldn't contribute to the tracking information.
*
* <p>We can resolve this by rounding intervals _outwards_. We'll round the lower endpoint down, and
* the higher endpoint up:
*
* <pre>
* ┌─────┬─• •─┬─────┬─────┐
* ← →
* </pre>
*
 * <p>This will always give us an interval that's at least one unit length, and it works
* consistently across T points and corners, so that they will properly close:
*
* <pre>
* │ \ / │ │ ← → │ │ ___ │
* ├────•╷•────┤ → ├────•╷•────┤ → ├─────┬─────┤
* │ │ │ │ ← │→ │ │ ‾│‾ │
* </pre>
*/
@SuppressWarnings("Assertion")
public class S2ShapeTracker {
// Tracking state.
// Dimension of the tracked shape (0 = points, 1 = polylines, 2 = polygons); -1 until init().
private int dimension = -1;
// One bit per chain of the shape; tracks which chains have been marked as seen.
private final CountingBitset chainsSeen = new CountingBitset();
// These are the maps for tracking and pairing matching segments crossing IJ intervals, as
// described above. For any IJ coordinate, we track the number of points seen at that coordinate.
// We track points in the I and J axis separately to avoid collisions and reduce the hashmap
// sizes.
/**
 * Map from (face,i,j) encoded as a long, to the number of crossings at that point, for IJ
 * axis 0. (Which of i or j is 0 or 1 depends on the face.)
 */
private final Long2IntMap points0 = new Long2IntOpenHashMap();
/**
 * Map from (face,i,j) encoded as a long, to the number of crossings at that point, for IJ
 * axis 1. (Which of i or j is 0 or 1 depends on the face.)
 */
private final Long2IntMap points1 = new Long2IntOpenHashMap();
/**
 * Constructs an S2ShapeTracker with no shape assigned; callers are expected to call one of the
 * {@link #init} overloads before tracking (dimension starts at -1).
 */
public S2ShapeTracker() {
    // Missing map entries must behave as a zero crossing count.
    points0.defaultReturnValue(0);
    points1.defaultReturnValue(0);
}
/**
 * Constructs a new S2ShapeTracker for a shape with the given dimension and number of chains.
 */
public S2ShapeTracker(int dimension, int numChains) {
    // Missing map entries must behave as a zero crossing count.
    points0.defaultReturnValue(0);
    points1.defaultReturnValue(0);
    init(dimension, numChains);
}
/** Constructs a new S2ShapeTracker for the given S2Shape. */
public S2ShapeTracker(S2Shape shape) {
    // Missing map entries must behave as a zero crossing count.
    points0.defaultReturnValue(0);
    points1.defaultReturnValue(0);
    init(shape);
}
/** Returns the map of crossing points for the given IJ axis (0 selects points0, otherwise points1). */
private Long2IntMap points(int axis) {
    if (axis == 0) {
        return points0;
    }
    return points1;
}
/**
 * Resets state to track a new shape.
 *
 * @param dimension dimension of the new shape (0 for points, 1 for polylines, 2 for polygons).
 * @param numChains number of chains in the new shape.
 */
public void init(int dimension, int numChains) {
    this.dimension = dimension;
    chainsSeen.reset(numChains);
    // Drop any crossing bookkeeping left over from the previous shape.
    points0.clear();
    points1.clear();
}
/**
 * Resets state for a new shape as above, but takes the shape directly, deriving the dimension
 * and chain count from it.
 */
public void init(S2Shape shape) {
    // Don't track edges for the full and empty shapes, since we'll never actually see an edge
    // despite them having one chain.
    int numChains = shape.numChains();
    if (shape.isEmpty() || shape.isFull()) {
        // TODO(torrey): Add test cases for empty shapes of various dimensions, as well as the full
        // shape. Also in C++. Make corrections here as needed.
        assert numChains == 1;
        numChains = 0;
    }
    init(shape.dimension(), numChains);
}
/**
 * Marks a chain as having been seen. This operation is idempotent and may be called multiple
 * times for the same chain safely.
 *
 * @param chain index of the chain within the shape.
 */
public void markChain(int chain) {
    chainsSeen.set(chain);
}
/**
* Processes a list of cell boundary crossings produced by S2RobustCellClipper and either adds
* interval crossings (for polygons), point crossings (for polylines), or does nothing (for
* points).
*
* <p>For a 2D shape, if there are no crossings, then this operation does nothing. The user should
* check whether the shape contains the cell boundary and add it via addCellBoundary() if
* necessary.
*/
public void processCrossings(S2Cell cell, List<Crossing> crossings) {
// If there are no crossings, or all we have are points, do nothing.
int ncrossing = crossings.size();
if (ncrossing == 0 || dimension == 0) {
return;
}
// Lookup cell boundary constants once.
double[] cellUvCoords = new double[4];
int[] cellIjCoords = new int[4];
for (int k = 0; k < 4; ++k) {
Boundary b = Boundary.fromValue(k);
cellUvCoords[k] = cell.getUVCoordOfBoundary(b);
cellIjCoords[k] = cell.getIJCoordOfBoundary(b);
}
if (dimension == 1) { //
// Note that we convert from UV to IJ using std::round which always rounds ties away from
// zero. This is needed to ensure that we round to correct values across face boundaries that
// might be flipped.
for (Crossing crossing : crossings) {
int ij = uvToIjRound(crossing.intercept);
int face = cell.face();
int axis = constantBoundaryAxis(cell.face(), crossing.boundary);
int coord = cellIjCoords[crossing.boundary.value];
if (crossing.boundary.value < 2) {
addPoint(face, axis, coord, ij);
} else {
delPoint(face, axis, coord, ij);
}
}
return;
}
// We have one or more crossings for a polygon, we need to scan around the boundary and make
// intervals where the shape contains the boundary.
//
// Crossings are ordered counter-clockwise only for the purposes of toggling shape-insideness,
// the actual U or V coordinates of the intercepts aren't necessarily monotonic because we don't
// toggle edges, we increment the winding number at coordinates, which is computed in the same
// way on the other side of the cell boundary.
boolean interior = crossings.get(0).crossingType == CrossingType.INCOMING;
int i = 0;
int[] bs = {0}; // Workaround for Java not supporting lambda capture. Just b.
Boundary boundary;
for (int b = 0; b < 4; ++b) {
bs[0] = b;
boundary = Boundary.fromValue(b);
// Lookup the axis, constant coordinate, and start and end coordinates of this boundary
// segment.
int axis = constantBoundaryAxis(cell.face(), boundary);
int coord = cellIjCoords[b];
int bnext = (b + 1) % 4;
int bprev = (b + 3) % 4;
double uvbeg = cellUvCoords[bprev];
double uvend = cellUvCoords[bnext];
int ijbeg = cellIjCoords[bprev];
int ijend = cellIjCoords[bnext];
// Returns true if two intercepts are properly ordered along the boundary. For boundaries 0 and
// 1, intercepts increase, and for 2 and 3, they decrease.
XYPredicate ordered = (x, y) -> bs[0] < 2 ? x < y : x > y;
// If there's a crossing on this boundary and we start in the interior, then add an interval
// between the start corner and the crossing. We do this as a separate step so that we can use
// the ij coordinate of the start point directly (which we know exactly). If we used the uv
// coordinate of the start point and rounded, it's possible not to end up on the correct ij
// value for the corner.
double uvprev;
if (i < ncrossing && crossings.get(i).boundary == boundary) {
Crossing crossing = crossings.get(i);
uvprev = crossing.intercept;
++i;
// In very rare circumstances, it's possible to get a crossing on a boundary, but that has
// an intercept value before (or after) the corner. This can happen when an edge crosses
// very close to the corner.
//
// Fortunately, either:
// 1. We're in the interior so we just added the b-1 segment.
// 2. Or we're not and we're about to add this boundary segment.
//
// Either way, the negative crossing doesn't change the portion of the cell boundary in this
// cell that's contained, so we can ignore it.
if (interior && ordered.test(uvbeg, crossing.intercept)) {
double uv = crossing.intercept;
int ij0 = ijbeg;
int ij1 = (b < 2) ? uvToIjCeil(uv) : uvToIjFloor(uv);
if (ij0 != ij1) {
addInterval(cell.face(), axis, coord, ij0, ij1);
}
}
// Crossing an outgoing edge puts us in the interior.
interior = (crossing.crossingType == CrossingType.OUTGOING);
} else {
// No crossings on this boundary. Just add the entire boundary if needed.
if (interior) {
addInterval(cell.face(), axis, coord, ijbeg, ijend);
}
continue;
}
// Proceed crossing by crossing, added each interval that covers part of the polygon interior,
// and toggle interior state at each crossing.
while (i < ncrossing && crossings.get(i).boundary == boundary) {
Crossing crossing = crossings.get(i);
if (interior) {
// Get canonical ordering of UV coordinates where uv0 < uv1.
double uv0 = min(uvprev, crossing.intercept);
double uv1 = max(uvprev, crossing.intercept);
// Convert to IJ coordinates but round _outwards_ from the interior.
int ij0 = uvToIjFloor(uv0);
int ij1 = uvToIjCeil(uv1);
if (b >= 2) {
int tmp = ij0;
ij0 = ij1;
ij1 = tmp;
}
// If the coordinates are equal despite rounding in different directions, then we must
// have landed on an integer ij value to the limits of floating point precision.
//
// For an interior interval, this can only happen if:
// 1. We had a reverse duplicate edge (not allowed)
// 2. We crossed exactly at a corner.
//
// One case isn't allowed and the other doesn't affect the result, so we can ignore it.
if (ij0 != ij1) {
addInterval(cell.face(), axis, coord, ij0, ij1);
}
}
// Crossing an outgoing edge puts us in the interior.
interior = (crossing.crossingType == CrossingType.OUTGOING);
uvprev = crossing.intercept;
++i;
}
// Either there's no more crossings or we started a new boundary. If we're still in the
// interior, then we need to add an interval between the last crossing and the endpoint.
//
// We do this as a separate step so that we can use the ij coordinate of the start point
// directly (which we know exactly). If we used the uv coordinate of the start point and
// rounded, it's possible not to end up on the correct ij value for the corner.
if (interior) {
// See comment in the start point section above.
if (ordered.test(uvprev, uvend)) {
int ij0 = (b < 2) ? uvToIjFloor(uvprev) : uvToIjCeil(uvprev);
int ij1 = ijend;
// Safe to ignore, see comment above.
if (ij0 != ij1) {
addInterval(cell.face(), axis, coord, ij0, ij1);
}
}
}
}
Preconditions.checkState(i == ncrossing);
}
/**
 * Adds the boundaries of the given cell to the tracker.
 *
 * <p>When a cell is contained by a shape but has no crossings, we still need to account for those
 * contained cell boundaries and ensure that we see them on both sides of the boundary.
 *
 * <p>This function only applies to 2D shapes. For points and polylines, this function is a noop.
 */
public void addCellBoundary(S2Cell cell) {
  int face = cell.face();
  // IJ coordinate at which each of the four boundary edges is constant.
  int bottom = cell.getIJCoordOfBoundary(BOTTOM_EDGE);
  int right = cell.getIJCoordOfBoundary(RIGHT_EDGE);
  int top = cell.getIJCoordOfBoundary(TOP_EDGE);
  int left = cell.getIJCoordOfBoundary(LEFT_EDGE);
  // Walk the boundary counter-clockwise (bottom, right, top, left), adding one interval per edge
  // along the axis on which that edge is constant.
  addInterval(face, constantBoundaryAxis(face, BOTTOM_EDGE), bottom, left, right);
  addInterval(face, constantBoundaryAxis(face, RIGHT_EDGE), right, bottom, top);
  addInterval(face, constantBoundaryAxis(face, TOP_EDGE), top, right, left);
  addInterval(face, constantBoundaryAxis(face, LEFT_EDGE), left, top, bottom);
}
/**
 * Returns true if the shape is finished. This means that all the chains in the shape have been
 * seen at least once and there are no outstanding interior pieces.
 */
public boolean finished() {
  // Every chain must have been seen at least once...
  if (!chainsSeen.full()) {
    return false;
  }
  // ...and no unmatched interval endpoints may remain on either axis.
  return points0.isEmpty() && points1.isEmpty();
}
/**
 * Creates a 64 bit integer (which is treated as unsigned) by appending face, i and j coordinates.
 *
 * <p>Key layout (high to low bits): face in bits 60..62, i in bits 30..60, j in bits 0..30.
 *
 * <p>NOTE(review): the asserted bounds are inclusive of 2^30, and an i or j equal to 2^30 sets
 * the low bit of the next field up, so distinct (face, i, j) triples at that boundary can share a
 * key. Callers appear to compute keys identically on both sides of a face boundary so any overlap
 * cancels out -- confirm before changing the packing.
 */
private long ijKey(int face, int i, int j) {
  assert face < 6;
  assert i <= 1 << 30;
  assert j <= 1 << 30;
  long ans = 0;
  ans |= ((long) face) << 60;
  ans |= ((long) i) << 30;
  ans |= (long) j;
  return ans;
}
/**
 * Adds the endpoints of an interval to the tracker. The sign of ij0 and ij1 are always the same,
 * this function always adds the point corresponding to ij0 and subtracts the point corresponding
 * to ij1.
 *
 * <p>Any given interval must be canceled out by an interval of the opposite sign, i.e. call this
 * function with the endpoints reversed: ij1, ij0.
 *
 * <p>This ordering appears naturally when working with cell boundary crossings. The order would
 * be ij0,ij1 on a positive boundary and ij1,ij0 on a negative boundary.
 *
 * <p>REQUIRES: Shape dimension is 2, and ij0 and ij1 aren't equal.
 */
@VisibleForTesting
void addInterval(int face, int axis, int ijcoord, int ij0, int ij1) {
  assert ij0 != ij1;
  assert dimension == 2;
  // If we're at the maximum coordinate value, wrap back to coordinate 0 on the
  // adjacent face. We always switch axes when we're transitioning across the
  // face boundary, but only when we go across the maximum J value do we negate
  // the value we were given.
  if (ijcoord == S2Projections.LIMIT_IJ) {
    ijcoord = 0;
    face = adjacentFace(face, axis);
    if (axis == 1) {
      // Crossing the maximum J boundary mirrors the coordinates on the adjacent face.
      ij0 = S2Projections.LIMIT_IJ - ij0;
      ij1 = S2Projections.LIMIT_IJ - ij1;
    }
    axis = 1 - axis;
  }
  // Increment the value at the ijKey for ij0, removing it from the map if it then becomes zero.
  points(axis).compute(ijKey(face, ijcoord, ij0), incrementFn);
  // And decrement the value at the ijKey for ij1, removing it from the map if it becomes zero.
  points(axis).compute(ijKey(face, ijcoord, ij1), decrementFn);
}
/** Adds a point to the tracker. Counterpart of {@code delPoint}; matched pairs cancel out. */
@VisibleForTesting
void addPoint(int face, int axis, int ijcoord, int ij) {
  // If we're at the maximum coordinate value, wrap back to coordinate 0 on the adjacent face. We
  // always switch axes when we're transitioning across the face boundary, but only when we go
  // across the maximum J value do we negate the value we were given.
  boolean flip = false;
  if (ijcoord == S2Projections.LIMIT_IJ) {
    ijcoord = 0;
    face = adjacentFace(face, axis);
    if (axis == 1) {
      flip = true;
      ij = S2Projections.LIMIT_IJ - ij;
    }
    axis = 1 - axis;
  }
  // A mirrored point is recorded with the opposite sign on the adjacent face, so delegate to
  // delPoint (which will not flip again since ijcoord is now 0).
  if (flip) {
    delPoint(face, axis, ijcoord, ij);
    return;
  }
  // Increment the value at the ijKey, removing it from the map if it then becomes zero.
  points(axis).compute(ijKey(face, ijcoord, ij), incrementFn);
}
/** Removes a point from the tracker. Counterpart of {@code addPoint}; matched pairs cancel out. */
@VisibleForTesting
void delPoint(int face, int axis, int ijcoord, int ij) {
  // If we're at the maximum coordinate value, wrap back to coordinate 0 on the adjacent face. We
  // always switch axes when we're transitioning across the face boundary, but only when we go
  // across the maximum J value do we negate the value we were given.
  boolean flip = false;
  if (ijcoord == S2Projections.LIMIT_IJ) {
    ijcoord = 0;
    face = adjacentFace(face, axis);
    if (axis == 1) {
      flip = true;
      ij = S2Projections.LIMIT_IJ - ij;
    }
    axis = 1 - axis;
  }
  // A mirrored point is recorded with the opposite sign on the adjacent face, so delegate to
  // addPoint (which will not flip again since ijcoord is now 0).
  if (flip) {
    addPoint(face, axis, ijcoord, ij);
    return;
  }
  // Decrement the value at the ijKey, removing it from the map if it becomes zero.
  points(axis).compute(ijKey(face, ijcoord, ij), decrementFn);
}
/**
 * Function for map.compute() that increments a map entry value. Creates the entry with starting
 * value 1 if the key is not present. Removes the entry if the value becomes zero.
 */
private static final BiFunction<Long, Integer, Integer> incrementFn =
    (Long key, Integer value) -> {
      if (value == null) {
        // First reference to this key.
        return 1;
      }
      int updated = value + 1;
      // Returning null from compute() removes the entry.
      return (updated == 0) ? null : updated;
    };
/**
 * Function for map.compute() that decrements a map entry value. Creates the entry with starting
 * value -1 if the key is not present. Removes the entry if the value becomes zero.
 */
// Written as a lambda for consistency with the sibling incrementFn above; returning null from
// compute() removes the entry.
private static final BiFunction<Long, Integer, Integer> decrementFn =
    (Long key, Integer value) -> {
      if (value == null) {
        return -1;
      }
      if (value == 1) {
        return null;
      }
      return value - 1;
    };
/** Converts from UV to IJ coordinates but always rounds down. */
private static int uvToIjFloor(double uv) {
  double st = S2Projections.uvToST(uv);
  return (int) Math.floor(st * S2Projections.LIMIT_IJ);
}
/** Converts from UV to IJ coordinates but always rounds up. */
private static int uvToIjCeil(double uv) {
  double st = S2Projections.uvToST(uv);
  return (int) Math.ceil(st * S2Projections.LIMIT_IJ);
}
/** Converts from UV to IJ coordinates but rounds ties away from zero. */
private static int uvToIjRound(double uv) {
  double st = S2Projections.uvToST(uv);
  return (int) Math.round(st * S2Projections.LIMIT_IJ);
}
/** Returns the axis along which a given cell boundary is constant. */
private static int constantBoundaryAxis(int face, Boundary boundary) {
  // Top and bottom edges are constant along axis 1, left and right along axis 0.
  boolean horizontal = (boundary == BOTTOM_EDGE) || (boundary == TOP_EDGE);
  int axis = horizontal ? 1 : 0;
  // Odd faces have axes flipped.
  if (face % 2 != 0) {
    axis = 1 - axis;
  }
  return axis;
}
/** Returns the face adjacent to the given face across the given axis. */
private static int adjacentFace(int face, int axis) {
  // Step two faces forward across axis 1, one face forward across axis 0.
  int step = (axis == 1) ? 2 : 1;
  return (face + step) % 6;
}
/**
 * Predicate over two primitive double coordinates, used to compare intercept positions along a
 * cell boundary. A primitive specialization is used because {@code BiPredicate<Double, Double>}
 * would box on every call.
 */
@FunctionalInterface
private interface XYPredicate {
  boolean test(double x, double y);
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.scheduler.v1;
import static com.google.cloud.scheduler.v1.CloudSchedulerClient.ListJobsPagedResponse;
import static com.google.cloud.scheduler.v1.CloudSchedulerClient.ListLocationsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.testing.MockHttpService;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ApiException;
import com.google.api.gax.rpc.ApiExceptionFactory;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.testing.FakeStatusCode;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.cloud.scheduler.v1.stub.HttpJsonCloudSchedulerStub;
import com.google.common.collect.Lists;
import com.google.protobuf.Any;
import com.google.protobuf.Duration;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Timestamp;
import com.google.rpc.Status;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class CloudSchedulerClientHttpJsonTest {
private static MockHttpService mockService;
private static CloudSchedulerClient client;
/** Starts a shared mock HTTP service and creates a client wired to it with no credentials. */
@BeforeClass
public static void startStaticServer() throws IOException {
  mockService =
      new MockHttpService(
          HttpJsonCloudSchedulerStub.getMethodDescriptors(),
          CloudSchedulerSettings.getDefaultEndpoint());
  CloudSchedulerSettings settings =
      CloudSchedulerSettings.newHttpJsonBuilder()
          .setTransportChannelProvider(
              CloudSchedulerSettings.defaultHttpJsonTransportProviderBuilder()
                  .setHttpTransport(mockService)
                  .build())
          .setCredentialsProvider(NoCredentialsProvider.create())
          .build();
  client = CloudSchedulerClient.create(settings);
}
/** Closes the shared client after all tests in the class have run. */
@AfterClass
public static void stopServer() {
  client.close();
}
/** No per-test setup is needed; present for symmetry with tearDown. */
@Before
public void setUp() {}
/** Clears queued responses/exceptions and recorded requests between tests. */
@After
public void tearDown() throws Exception {
  mockService.reset();
}
/** Verifies listJobs(LocationName) pages through the mocked response in a single request. */
@Test
public void listJobsTest() throws Exception {
  Job responsesElement = Job.newBuilder().build();
  ListJobsResponse expectedResponse =
      ListJobsResponse.newBuilder()
          .setNextPageToken("")
          .addAllJobs(Arrays.asList(responsesElement))
          .build();
  mockService.addResponse(expectedResponse);
  LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
  ListJobsPagedResponse pagedListResponse = client.listJobs(parent);
  List<Job> resources = Lists.newArrayList(pagedListResponse.iterateAll());
  Assert.assertEquals(1, resources.size());
  Assert.assertEquals(expectedResponse.getJobsList().get(0), resources.get(0));
  List<String> actualRequests = mockService.getRequestPaths();
  Assert.assertEquals(1, actualRequests.size());
  String apiClientHeaderKey =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
          .matcher(apiClientHeaderKey)
          .matches());
}
/** Verifies listJobs(LocationName) surfaces an INVALID_ARGUMENT error as InvalidArgumentException. */
@Test
public void listJobsExceptionTest() throws Exception {
  ApiException exception =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(exception);
  try {
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    client.listJobs(parent);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
/** Verifies listJobs(String parent) pages through the mocked response in a single request. */
@Test
public void listJobsTest2() throws Exception {
  Job responsesElement = Job.newBuilder().build();
  ListJobsResponse expectedResponse =
      ListJobsResponse.newBuilder()
          .setNextPageToken("")
          .addAllJobs(Arrays.asList(responsesElement))
          .build();
  mockService.addResponse(expectedResponse);
  String parent = "projects/project-5833/locations/location-5833";
  ListJobsPagedResponse pagedListResponse = client.listJobs(parent);
  List<Job> resources = Lists.newArrayList(pagedListResponse.iterateAll());
  Assert.assertEquals(1, resources.size());
  Assert.assertEquals(expectedResponse.getJobsList().get(0), resources.get(0));
  List<String> actualRequests = mockService.getRequestPaths();
  Assert.assertEquals(1, actualRequests.size());
  String apiClientHeaderKey =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
          .matcher(apiClientHeaderKey)
          .matches());
}
/** Verifies listJobs(String parent) surfaces an INVALID_ARGUMENT error as InvalidArgumentException. */
@Test
public void listJobsExceptionTest2() throws Exception {
  ApiException exception =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(exception);
  try {
    String parent = "projects/project-5833/locations/location-5833";
    client.listJobs(parent);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
/** Verifies getJob(JobName) returns the mocked Job via a single request with the expected header. */
@Test
public void getJobTest() throws Exception {
  Job expectedResponse =
      Job.newBuilder()
          .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString())
          .setDescription("description-1724546052")
          .setSchedule("schedule-697920873")
          .setTimeZone("timeZone-2077180903")
          .setUserUpdateTime(Timestamp.newBuilder().build())
          .setStatus(Status.newBuilder().build())
          .setScheduleTime(Timestamp.newBuilder().build())
          .setLastAttemptTime(Timestamp.newBuilder().build())
          .setRetryConfig(RetryConfig.newBuilder().build())
          .setAttemptDeadline(Duration.newBuilder().build())
          .build();
  mockService.addResponse(expectedResponse);
  JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]");
  Job actualResponse = client.getJob(name);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<String> actualRequests = mockService.getRequestPaths();
  Assert.assertEquals(1, actualRequests.size());
  String apiClientHeaderKey =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
          .matcher(apiClientHeaderKey)
          .matches());
}
/** Verifies getJob(JobName) surfaces an INVALID_ARGUMENT error as InvalidArgumentException. */
@Test
public void getJobExceptionTest() throws Exception {
  ApiException exception =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(exception);
  try {
    JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]");
    client.getJob(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
/** Verifies getJob(String name) returns the mocked Job via a single request with the expected header. */
@Test
public void getJobTest2() throws Exception {
  Job expectedResponse =
      Job.newBuilder()
          .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString())
          .setDescription("description-1724546052")
          .setSchedule("schedule-697920873")
          .setTimeZone("timeZone-2077180903")
          .setUserUpdateTime(Timestamp.newBuilder().build())
          .setStatus(Status.newBuilder().build())
          .setScheduleTime(Timestamp.newBuilder().build())
          .setLastAttemptTime(Timestamp.newBuilder().build())
          .setRetryConfig(RetryConfig.newBuilder().build())
          .setAttemptDeadline(Duration.newBuilder().build())
          .build();
  mockService.addResponse(expectedResponse);
  String name = "projects/project-3306/locations/location-3306/jobs/job-3306";
  Job actualResponse = client.getJob(name);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<String> actualRequests = mockService.getRequestPaths();
  Assert.assertEquals(1, actualRequests.size());
  String apiClientHeaderKey =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
          .matcher(apiClientHeaderKey)
          .matches());
}
/** Verifies getJob(String name) surfaces an INVALID_ARGUMENT error as InvalidArgumentException. */
@Test
public void getJobExceptionTest2() throws Exception {
  ApiException exception =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(exception);
  try {
    String name = "projects/project-3306/locations/location-3306/jobs/job-3306";
    client.getJob(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
/** Verifies createJob(LocationName, Job) returns the mocked Job via a single request. */
@Test
public void createJobTest() throws Exception {
  Job expectedResponse =
      Job.newBuilder()
          .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString())
          .setDescription("description-1724546052")
          .setSchedule("schedule-697920873")
          .setTimeZone("timeZone-2077180903")
          .setUserUpdateTime(Timestamp.newBuilder().build())
          .setStatus(Status.newBuilder().build())
          .setScheduleTime(Timestamp.newBuilder().build())
          .setLastAttemptTime(Timestamp.newBuilder().build())
          .setRetryConfig(RetryConfig.newBuilder().build())
          .setAttemptDeadline(Duration.newBuilder().build())
          .build();
  mockService.addResponse(expectedResponse);
  LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
  Job job = Job.newBuilder().build();
  Job actualResponse = client.createJob(parent, job);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<String> actualRequests = mockService.getRequestPaths();
  Assert.assertEquals(1, actualRequests.size());
  String apiClientHeaderKey =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
          .matcher(apiClientHeaderKey)
          .matches());
}
/** Verifies createJob(LocationName, Job) surfaces an INVALID_ARGUMENT error as InvalidArgumentException. */
@Test
public void createJobExceptionTest() throws Exception {
  ApiException exception =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(exception);
  try {
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    Job job = Job.newBuilder().build();
    client.createJob(parent, job);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
/** Verifies createJob(String parent, Job) returns the mocked Job via a single request. */
@Test
public void createJobTest2() throws Exception {
  Job expectedResponse =
      Job.newBuilder()
          .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString())
          .setDescription("description-1724546052")
          .setSchedule("schedule-697920873")
          .setTimeZone("timeZone-2077180903")
          .setUserUpdateTime(Timestamp.newBuilder().build())
          .setStatus(Status.newBuilder().build())
          .setScheduleTime(Timestamp.newBuilder().build())
          .setLastAttemptTime(Timestamp.newBuilder().build())
          .setRetryConfig(RetryConfig.newBuilder().build())
          .setAttemptDeadline(Duration.newBuilder().build())
          .build();
  mockService.addResponse(expectedResponse);
  String parent = "projects/project-5833/locations/location-5833";
  Job job = Job.newBuilder().build();
  Job actualResponse = client.createJob(parent, job);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<String> actualRequests = mockService.getRequestPaths();
  Assert.assertEquals(1, actualRequests.size());
  String apiClientHeaderKey =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
          .matcher(apiClientHeaderKey)
          .matches());
}
/** Verifies createJob(String parent, Job) surfaces an INVALID_ARGUMENT error as InvalidArgumentException. */
@Test
public void createJobExceptionTest2() throws Exception {
  ApiException exception =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(exception);
  try {
    String parent = "projects/project-5833/locations/location-5833";
    Job job = Job.newBuilder().build();
    client.createJob(parent, job);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
/** Verifies updateJob(Job, FieldMask) returns the mocked Job via a single request. */
@Test
public void updateJobTest() throws Exception {
  Job expectedResponse =
      Job.newBuilder()
          .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString())
          .setDescription("description-1724546052")
          .setSchedule("schedule-697920873")
          .setTimeZone("timeZone-2077180903")
          .setUserUpdateTime(Timestamp.newBuilder().build())
          .setStatus(Status.newBuilder().build())
          .setScheduleTime(Timestamp.newBuilder().build())
          .setLastAttemptTime(Timestamp.newBuilder().build())
          .setRetryConfig(RetryConfig.newBuilder().build())
          .setAttemptDeadline(Duration.newBuilder().build())
          .build();
  mockService.addResponse(expectedResponse);
  // The request Job must carry a resource name so the HTTP path can be derived from it.
  Job job =
      Job.newBuilder()
          .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString())
          .setDescription("description-1724546052")
          .setSchedule("schedule-697920873")
          .setTimeZone("timeZone-2077180903")
          .setUserUpdateTime(Timestamp.newBuilder().build())
          .setStatus(Status.newBuilder().build())
          .setScheduleTime(Timestamp.newBuilder().build())
          .setLastAttemptTime(Timestamp.newBuilder().build())
          .setRetryConfig(RetryConfig.newBuilder().build())
          .setAttemptDeadline(Duration.newBuilder().build())
          .build();
  FieldMask updateMask = FieldMask.newBuilder().build();
  Job actualResponse = client.updateJob(job, updateMask);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<String> actualRequests = mockService.getRequestPaths();
  Assert.assertEquals(1, actualRequests.size());
  String apiClientHeaderKey =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
          .matcher(apiClientHeaderKey)
          .matches());
}
/** Verifies updateJob(Job, FieldMask) surfaces an INVALID_ARGUMENT error as InvalidArgumentException. */
@Test
public void updateJobExceptionTest() throws Exception {
  ApiException exception =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(exception);
  try {
    Job job =
        Job.newBuilder()
            .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString())
            .setDescription("description-1724546052")
            .setSchedule("schedule-697920873")
            .setTimeZone("timeZone-2077180903")
            .setUserUpdateTime(Timestamp.newBuilder().build())
            .setStatus(Status.newBuilder().build())
            .setScheduleTime(Timestamp.newBuilder().build())
            .setLastAttemptTime(Timestamp.newBuilder().build())
            .setRetryConfig(RetryConfig.newBuilder().build())
            .setAttemptDeadline(Duration.newBuilder().build())
            .build();
    FieldMask updateMask = FieldMask.newBuilder().build();
    client.updateJob(job, updateMask);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
/** Verifies deleteJob(JobName) completes and sends exactly one request. */
@Test
public void deleteJobTest() throws Exception {
  Empty expectedResponse = Empty.newBuilder().build();
  mockService.addResponse(expectedResponse);
  JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]");
  client.deleteJob(name);
  List<String> actualRequests = mockService.getRequestPaths();
  Assert.assertEquals(1, actualRequests.size());
  String apiClientHeaderKey =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
          .matcher(apiClientHeaderKey)
          .matches());
}
/** Verifies deleteJob(JobName) surfaces an INVALID_ARGUMENT error as InvalidArgumentException. */
@Test
public void deleteJobExceptionTest() throws Exception {
  ApiException exception =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(exception);
  try {
    JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]");
    client.deleteJob(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
/** Verifies deleteJob(String name) completes and sends exactly one request. */
@Test
public void deleteJobTest2() throws Exception {
  Empty expectedResponse = Empty.newBuilder().build();
  mockService.addResponse(expectedResponse);
  String name = "projects/project-3306/locations/location-3306/jobs/job-3306";
  client.deleteJob(name);
  List<String> actualRequests = mockService.getRequestPaths();
  Assert.assertEquals(1, actualRequests.size());
  String apiClientHeaderKey =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
          .matcher(apiClientHeaderKey)
          .matches());
}
/** Verifies deleteJob(String name) surfaces an INVALID_ARGUMENT error as InvalidArgumentException. */
@Test
public void deleteJobExceptionTest2() throws Exception {
  ApiException exception =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(exception);
  try {
    String name = "projects/project-3306/locations/location-3306/jobs/job-3306";
    client.deleteJob(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
/** Verifies pauseJob(JobName) returns the mocked Job via a single request. */
@Test
public void pauseJobTest() throws Exception {
  Job expectedResponse =
      Job.newBuilder()
          .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString())
          .setDescription("description-1724546052")
          .setSchedule("schedule-697920873")
          .setTimeZone("timeZone-2077180903")
          .setUserUpdateTime(Timestamp.newBuilder().build())
          .setStatus(Status.newBuilder().build())
          .setScheduleTime(Timestamp.newBuilder().build())
          .setLastAttemptTime(Timestamp.newBuilder().build())
          .setRetryConfig(RetryConfig.newBuilder().build())
          .setAttemptDeadline(Duration.newBuilder().build())
          .build();
  mockService.addResponse(expectedResponse);
  JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]");
  Job actualResponse = client.pauseJob(name);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<String> actualRequests = mockService.getRequestPaths();
  Assert.assertEquals(1, actualRequests.size());
  String apiClientHeaderKey =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
          .matcher(apiClientHeaderKey)
          .matches());
}
/** Verifies pauseJob(JobName) surfaces an INVALID_ARGUMENT error as InvalidArgumentException. */
@Test
public void pauseJobExceptionTest() throws Exception {
  ApiException exception =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(exception);
  try {
    JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]");
    client.pauseJob(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
/** Verifies pauseJob(String name) returns the mocked Job via a single request. */
@Test
public void pauseJobTest2() throws Exception {
  Job expectedResponse =
      Job.newBuilder()
          .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString())
          .setDescription("description-1724546052")
          .setSchedule("schedule-697920873")
          .setTimeZone("timeZone-2077180903")
          .setUserUpdateTime(Timestamp.newBuilder().build())
          .setStatus(Status.newBuilder().build())
          .setScheduleTime(Timestamp.newBuilder().build())
          .setLastAttemptTime(Timestamp.newBuilder().build())
          .setRetryConfig(RetryConfig.newBuilder().build())
          .setAttemptDeadline(Duration.newBuilder().build())
          .build();
  mockService.addResponse(expectedResponse);
  String name = "projects/project-3306/locations/location-3306/jobs/job-3306";
  Job actualResponse = client.pauseJob(name);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<String> actualRequests = mockService.getRequestPaths();
  Assert.assertEquals(1, actualRequests.size());
  String apiClientHeaderKey =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
          .matcher(apiClientHeaderKey)
          .matches());
}
/** Verifies pauseJob(String name) surfaces an INVALID_ARGUMENT error as InvalidArgumentException. */
@Test
public void pauseJobExceptionTest2() throws Exception {
  ApiException exception =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(exception);
  try {
    String name = "projects/project-3306/locations/location-3306/jobs/job-3306";
    client.pauseJob(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
/** Verifies resumeJob(JobName) returns the mocked Job via a single request. */
@Test
public void resumeJobTest() throws Exception {
  Job expectedResponse =
      Job.newBuilder()
          .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString())
          .setDescription("description-1724546052")
          .setSchedule("schedule-697920873")
          .setTimeZone("timeZone-2077180903")
          .setUserUpdateTime(Timestamp.newBuilder().build())
          .setStatus(Status.newBuilder().build())
          .setScheduleTime(Timestamp.newBuilder().build())
          .setLastAttemptTime(Timestamp.newBuilder().build())
          .setRetryConfig(RetryConfig.newBuilder().build())
          .setAttemptDeadline(Duration.newBuilder().build())
          .build();
  mockService.addResponse(expectedResponse);
  JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]");
  Job actualResponse = client.resumeJob(name);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<String> actualRequests = mockService.getRequestPaths();
  Assert.assertEquals(1, actualRequests.size());
  String apiClientHeaderKey =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
          .matcher(apiClientHeaderKey)
          .matches());
}
/** Verifies resumeJob(JobName) surfaces an INVALID_ARGUMENT error as InvalidArgumentException. */
@Test
public void resumeJobExceptionTest() throws Exception {
  ApiException exception =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(exception);
  try {
    JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]");
    client.resumeJob(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
@Test
public void resumeJobTest2() throws Exception {
  // Canned Job the mock backend will return.
  Job mockedJob =
      Job.newBuilder()
          .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString())
          .setDescription("description-1724546052")
          .setSchedule("schedule-697920873")
          .setTimeZone("timeZone-2077180903")
          .setUserUpdateTime(Timestamp.newBuilder().build())
          .setStatus(Status.newBuilder().build())
          .setScheduleTime(Timestamp.newBuilder().build())
          .setLastAttemptTime(Timestamp.newBuilder().build())
          .setRetryConfig(RetryConfig.newBuilder().build())
          .setAttemptDeadline(Duration.newBuilder().build())
          .build();
  mockService.addResponse(mockedJob);

  // Same RPC as resumeJobTest, exercised through the plain-String overload.
  String jobName = "projects/project-3306/locations/location-3306/jobs/job-3306";
  Job result = client.resumeJob(jobName);

  Assert.assertEquals(mockedJob, result);
  Assert.assertEquals(1, mockService.getRequestPaths().size());

  String headerValue =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void resumeJobExceptionTest2() throws Exception {
  // Stub the backend to fail with INVALID_ARGUMENT.
  ApiException backendException =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(backendException);

  String jobName = "projects/project-3306/locations/location-3306/jobs/job-3306";
  try {
    client.resumeJob(jobName);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected: the transport error surfaces as InvalidArgumentException.
  }
}
@Test
public void runJobTest() throws Exception {
  // Canned Job the mock backend will return.
  Job mockedJob =
      Job.newBuilder()
          .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString())
          .setDescription("description-1724546052")
          .setSchedule("schedule-697920873")
          .setTimeZone("timeZone-2077180903")
          .setUserUpdateTime(Timestamp.newBuilder().build())
          .setStatus(Status.newBuilder().build())
          .setScheduleTime(Timestamp.newBuilder().build())
          .setLastAttemptTime(Timestamp.newBuilder().build())
          .setRetryConfig(RetryConfig.newBuilder().build())
          .setAttemptDeadline(Duration.newBuilder().build())
          .build();
  mockService.addResponse(mockedJob);

  JobName jobName = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]");
  Job result = client.runJob(jobName);

  Assert.assertEquals(mockedJob, result);
  Assert.assertEquals(1, mockService.getRequestPaths().size());

  String headerValue =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void runJobExceptionTest() throws Exception {
  // Stub the backend to fail with INVALID_ARGUMENT.
  ApiException backendException =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(backendException);

  JobName jobName = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]");
  try {
    client.runJob(jobName);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected: the transport error surfaces as InvalidArgumentException.
  }
}
@Test
public void runJobTest2() throws Exception {
  // Canned Job the mock backend will return.
  Job mockedJob =
      Job.newBuilder()
          .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString())
          .setDescription("description-1724546052")
          .setSchedule("schedule-697920873")
          .setTimeZone("timeZone-2077180903")
          .setUserUpdateTime(Timestamp.newBuilder().build())
          .setStatus(Status.newBuilder().build())
          .setScheduleTime(Timestamp.newBuilder().build())
          .setLastAttemptTime(Timestamp.newBuilder().build())
          .setRetryConfig(RetryConfig.newBuilder().build())
          .setAttemptDeadline(Duration.newBuilder().build())
          .build();
  mockService.addResponse(mockedJob);

  // Same RPC as runJobTest, exercised through the plain-String overload.
  String jobName = "projects/project-3306/locations/location-3306/jobs/job-3306";
  Job result = client.runJob(jobName);

  Assert.assertEquals(mockedJob, result);
  Assert.assertEquals(1, mockService.getRequestPaths().size());

  String headerValue =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void runJobExceptionTest2() throws Exception {
  // Stub the backend to fail with INVALID_ARGUMENT.
  ApiException backendException =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(backendException);

  String jobName = "projects/project-3306/locations/location-3306/jobs/job-3306";
  try {
    client.runJob(jobName);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected: the transport error surfaces as InvalidArgumentException.
  }
}
@Test
public void listLocationsTest() throws Exception {
  // Single-page response containing one (empty) Location.
  Location location = Location.newBuilder().build();
  ListLocationsResponse mockedResponse =
      ListLocationsResponse.newBuilder()
          .setNextPageToken("")
          .addAllLocations(Arrays.asList(location))
          .build();
  mockService.addResponse(mockedResponse);

  ListLocationsRequest request =
      ListLocationsRequest.newBuilder()
          .setName("projects/project-3664")
          .setFilter("filter-1274492040")
          .setPageSize(883849137)
          .setPageToken("pageToken873572522")
          .build();

  ListLocationsPagedResponse pagedResponse = client.listLocations(request);

  // Paging over the result must yield exactly the stubbed element.
  List<Location> locations = Lists.newArrayList(pagedResponse.iterateAll());
  Assert.assertEquals(1, locations.size());
  Assert.assertEquals(mockedResponse.getLocationsList().get(0), locations.get(0));

  Assert.assertEquals(1, mockService.getRequestPaths().size());

  String headerValue =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void listLocationsExceptionTest() throws Exception {
  // Stub the backend to fail with INVALID_ARGUMENT.
  ApiException backendException =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(backendException);

  ListLocationsRequest request =
      ListLocationsRequest.newBuilder()
          .setName("projects/project-3664")
          .setFilter("filter-1274492040")
          .setPageSize(883849137)
          .setPageToken("pageToken873572522")
          .build();
  try {
    client.listLocations(request);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected: the transport error surfaces as InvalidArgumentException.
  }
}
@Test
public void getLocationTest() throws Exception {
  // Canned Location the mock backend will return.
  Location mockedLocation =
      Location.newBuilder()
          .setName("name3373707")
          .setLocationId("locationId1541836720")
          .setDisplayName("displayName1714148973")
          .putAllLabels(new HashMap<String, String>())
          .setMetadata(Any.newBuilder().build())
          .build();
  mockService.addResponse(mockedLocation);

  GetLocationRequest request =
      GetLocationRequest.newBuilder()
          .setName("projects/project-9062/locations/location-9062")
          .build();

  Location result = client.getLocation(request);

  Assert.assertEquals(mockedLocation, result);
  Assert.assertEquals(1, mockService.getRequestPaths().size());

  String headerValue =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void getLocationExceptionTest() throws Exception {
  // Stub the backend to fail with INVALID_ARGUMENT.
  ApiException backendException =
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
  mockService.addException(backendException);

  GetLocationRequest request =
      GetLocationRequest.newBuilder()
          .setName("projects/project-9062/locations/location-9062")
          .build();
  try {
    client.getLocation(request);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected: the transport error surfaces as InvalidArgumentException.
  }
}
}
|
googleapis/google-cloud-java | 36,377 | java-workflows/google-cloud-workflows/src/main/java/com/google/cloud/workflows/v1/stub/HttpJsonWorkflowsStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.workflows.v1.stub;
import static com.google.cloud.workflows.v1.WorkflowsClient.ListLocationsPagedResponse;
import static com.google.cloud.workflows.v1.WorkflowsClient.ListWorkflowRevisionsPagedResponse;
import static com.google.cloud.workflows.v1.WorkflowsClient.ListWorkflowsPagedResponse;
import com.google.api.HttpRule;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.httpjson.longrunning.stub.HttpJsonOperationsStub;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.cloud.workflows.v1.CreateWorkflowRequest;
import com.google.cloud.workflows.v1.DeleteWorkflowRequest;
import com.google.cloud.workflows.v1.GetWorkflowRequest;
import com.google.cloud.workflows.v1.ListWorkflowRevisionsRequest;
import com.google.cloud.workflows.v1.ListWorkflowRevisionsResponse;
import com.google.cloud.workflows.v1.ListWorkflowsRequest;
import com.google.cloud.workflows.v1.ListWorkflowsResponse;
import com.google.cloud.workflows.v1.OperationMetadata;
import com.google.cloud.workflows.v1.UpdateWorkflowRequest;
import com.google.cloud.workflows.v1.Workflow;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the Workflows service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class HttpJsonWorkflowsStub extends WorkflowsStub {
// Type registry used by the response parsers to unpack protobuf Any payloads
// (the response/metadata messages carried inside long-running Operations).
private static final TypeRegistry typeRegistry =
    TypeRegistry.newBuilder()
        .add(Empty.getDescriptor())
        .add(Workflow.getDescriptor())
        .add(OperationMetadata.getDescriptor())
        .build();
// REST mapping for ListWorkflows: GET /v1/{parent=...}/workflows.
// Filter/order/paging fields travel as query parameters; there is no request body.
private static final ApiMethodDescriptor<ListWorkflowsRequest, ListWorkflowsResponse>
    listWorkflowsMethodDescriptor =
        ApiMethodDescriptor.<ListWorkflowsRequest, ListWorkflowsResponse>newBuilder()
            .setFullMethodName("google.cloud.workflows.v1.Workflows/ListWorkflows")
            .setHttpMethod("GET")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<ListWorkflowsRequest>newBuilder()
                    .setPath(
                        "/v1/{parent=projects/*/locations/*}/workflows",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<ListWorkflowsRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "parent", request.getParent());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<ListWorkflowsRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "filter", request.getFilter());
                          serializer.putQueryParam(fields, "orderBy", request.getOrderBy());
                          serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                          serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(request -> null)
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<ListWorkflowsResponse>newBuilder()
                    .setDefaultInstance(ListWorkflowsResponse.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// REST mapping for GetWorkflow: GET /v1/{name=.../workflows/*}.
// Optional revisionId is sent as a query parameter; no request body.
private static final ApiMethodDescriptor<GetWorkflowRequest, Workflow>
    getWorkflowMethodDescriptor =
        ApiMethodDescriptor.<GetWorkflowRequest, Workflow>newBuilder()
            .setFullMethodName("google.cloud.workflows.v1.Workflows/GetWorkflow")
            .setHttpMethod("GET")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<GetWorkflowRequest>newBuilder()
                    .setPath(
                        "/v1/{name=projects/*/locations/*/workflows/*}",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<GetWorkflowRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "name", request.getName());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<GetWorkflowRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "revisionId", request.getRevisionId());
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(request -> null)
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<Workflow>newBuilder()
                    .setDefaultInstance(Workflow.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// REST mapping for CreateWorkflow: POST /v1/{parent=...}/workflows.
// The Workflow message is the request body; workflowId is a query parameter.
// Returns a long-running Operation, hence the operation-snapshot factory below.
private static final ApiMethodDescriptor<CreateWorkflowRequest, Operation>
    createWorkflowMethodDescriptor =
        ApiMethodDescriptor.<CreateWorkflowRequest, Operation>newBuilder()
            .setFullMethodName("google.cloud.workflows.v1.Workflows/CreateWorkflow")
            .setHttpMethod("POST")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<CreateWorkflowRequest>newBuilder()
                    .setPath(
                        "/v1/{parent=projects/*/locations/*}/workflows",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<CreateWorkflowRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "parent", request.getParent());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<CreateWorkflowRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "workflowId", request.getWorkflowId());
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(
                        request ->
                            ProtoRestSerializer.create()
                                .toBody("workflow", request.getWorkflow(), true))
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<Operation>newBuilder()
                    .setDefaultInstance(Operation.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .setOperationSnapshotFactory(
                (CreateWorkflowRequest request, Operation response) ->
                    HttpJsonOperationSnapshot.create(response))
            .build();
// REST mapping for DeleteWorkflow: DELETE /v1/{name=.../workflows/*}.
// No request body; returns a long-running Operation.
private static final ApiMethodDescriptor<DeleteWorkflowRequest, Operation>
    deleteWorkflowMethodDescriptor =
        ApiMethodDescriptor.<DeleteWorkflowRequest, Operation>newBuilder()
            .setFullMethodName("google.cloud.workflows.v1.Workflows/DeleteWorkflow")
            .setHttpMethod("DELETE")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<DeleteWorkflowRequest>newBuilder()
                    .setPath(
                        "/v1/{name=projects/*/locations/*/workflows/*}",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<DeleteWorkflowRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "name", request.getName());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<DeleteWorkflowRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(request -> null)
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<Operation>newBuilder()
                    .setDefaultInstance(Operation.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .setOperationSnapshotFactory(
                (DeleteWorkflowRequest request, Operation response) ->
                    HttpJsonOperationSnapshot.create(response))
            .build();
// REST mapping for UpdateWorkflow: PATCH keyed on the nested workflow.name field.
// updateMask travels as a query parameter; the Workflow message is the body.
// Returns a long-running Operation.
private static final ApiMethodDescriptor<UpdateWorkflowRequest, Operation>
    updateWorkflowMethodDescriptor =
        ApiMethodDescriptor.<UpdateWorkflowRequest, Operation>newBuilder()
            .setFullMethodName("google.cloud.workflows.v1.Workflows/UpdateWorkflow")
            .setHttpMethod("PATCH")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<UpdateWorkflowRequest>newBuilder()
                    .setPath(
                        "/v1/{workflow.name=projects/*/locations/*/workflows/*}",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<UpdateWorkflowRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(
                              fields, "workflow.name", request.getWorkflow().getName());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<UpdateWorkflowRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "updateMask", request.getUpdateMask());
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(
                        request ->
                            ProtoRestSerializer.create()
                                .toBody("workflow", request.getWorkflow(), true))
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<Operation>newBuilder()
                    .setDefaultInstance(Operation.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .setOperationSnapshotFactory(
                (UpdateWorkflowRequest request, Operation response) ->
                    HttpJsonOperationSnapshot.create(response))
            .build();
// REST mapping for ListWorkflowRevisions: GET .../workflows/*:listRevisions.
// Paging fields travel as query parameters; no request body.
private static final ApiMethodDescriptor<
        ListWorkflowRevisionsRequest, ListWorkflowRevisionsResponse>
    listWorkflowRevisionsMethodDescriptor =
        ApiMethodDescriptor
            .<ListWorkflowRevisionsRequest, ListWorkflowRevisionsResponse>newBuilder()
            .setFullMethodName("google.cloud.workflows.v1.Workflows/ListWorkflowRevisions")
            .setHttpMethod("GET")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<ListWorkflowRevisionsRequest>newBuilder()
                    .setPath(
                        "/v1/{name=projects/*/locations/*/workflows/*}:listRevisions",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<ListWorkflowRevisionsRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "name", request.getName());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<ListWorkflowRevisionsRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                          serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(request -> null)
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<ListWorkflowRevisionsResponse>newBuilder()
                    .setDefaultInstance(ListWorkflowRevisionsResponse.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// REST mapping for the mixin Locations.ListLocations RPC: GET /v1/{name=projects/*}/locations.
private static final ApiMethodDescriptor<ListLocationsRequest, ListLocationsResponse>
    listLocationsMethodDescriptor =
        ApiMethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder()
            .setFullMethodName("google.cloud.location.Locations/ListLocations")
            .setHttpMethod("GET")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<ListLocationsRequest>newBuilder()
                    .setPath(
                        "/v1/{name=projects/*}/locations",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<ListLocationsRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "name", request.getName());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<ListLocationsRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(request -> null)
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<ListLocationsResponse>newBuilder()
                    .setDefaultInstance(ListLocationsResponse.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// REST mapping for the mixin Locations.GetLocation RPC: GET /v1/{name=projects/*/locations/*}.
private static final ApiMethodDescriptor<GetLocationRequest, Location>
    getLocationMethodDescriptor =
        ApiMethodDescriptor.<GetLocationRequest, Location>newBuilder()
            .setFullMethodName("google.cloud.location.Locations/GetLocation")
            .setHttpMethod("GET")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<GetLocationRequest>newBuilder()
                    .setPath(
                        "/v1/{name=projects/*/locations/*}",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<GetLocationRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "name", request.getName());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<GetLocationRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(request -> null)
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<Location>newBuilder()
                    .setDefaultInstance(Location.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// Per-RPC callables, all wired up once in the constructor.
private final UnaryCallable<ListWorkflowsRequest, ListWorkflowsResponse> listWorkflowsCallable;
private final UnaryCallable<ListWorkflowsRequest, ListWorkflowsPagedResponse>
    listWorkflowsPagedCallable;
private final UnaryCallable<GetWorkflowRequest, Workflow> getWorkflowCallable;
private final UnaryCallable<CreateWorkflowRequest, Operation> createWorkflowCallable;
private final OperationCallable<CreateWorkflowRequest, Workflow, OperationMetadata>
    createWorkflowOperationCallable;
private final UnaryCallable<DeleteWorkflowRequest, Operation> deleteWorkflowCallable;
private final OperationCallable<DeleteWorkflowRequest, Empty, OperationMetadata>
    deleteWorkflowOperationCallable;
private final UnaryCallable<UpdateWorkflowRequest, Operation> updateWorkflowCallable;
private final OperationCallable<UpdateWorkflowRequest, Workflow, OperationMetadata>
    updateWorkflowOperationCallable;
private final UnaryCallable<ListWorkflowRevisionsRequest, ListWorkflowRevisionsResponse>
    listWorkflowRevisionsCallable;
private final UnaryCallable<ListWorkflowRevisionsRequest, ListWorkflowRevisionsPagedResponse>
    listWorkflowRevisionsPagedCallable;
private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable;
private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
    listLocationsPagedCallable;
private final UnaryCallable<GetLocationRequest, Location> getLocationCallable;
// Aggregated shutdown handle plus supporting stubs/factories shared across callables.
private final BackgroundResource backgroundResources;
private final HttpJsonOperationsStub httpJsonOperationsStub;
private final HttpJsonStubCallableFactory callableFactory;
/** Creates a stub from the given settings, building a fresh {@link ClientContext}. */
public static final HttpJsonWorkflowsStub create(WorkflowsStubSettings settings)
    throws IOException {
  return new HttpJsonWorkflowsStub(settings, ClientContext.create(settings));
}
/** Creates a stub with default HTTP/JSON settings bound to an existing client context. */
public static final HttpJsonWorkflowsStub create(ClientContext clientContext) throws IOException {
  return new HttpJsonWorkflowsStub(
      WorkflowsStubSettings.newHttpJsonBuilder().build(), clientContext);
}
/** Creates a stub with default settings, an existing context, and a custom callable factory. */
public static final HttpJsonWorkflowsStub create(
    ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
  return new HttpJsonWorkflowsStub(
      WorkflowsStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory);
}
/**
 * Constructs an instance of HttpJsonWorkflowsStub, using the given settings. This is protected so
 * that it is easy to make a subclass, but otherwise, the static factory methods should be
 * preferred.
 */
protected HttpJsonWorkflowsStub(WorkflowsStubSettings settings, ClientContext clientContext)
    throws IOException {
  // Delegates to the main constructor with the default callable factory.
  this(settings, clientContext, new HttpJsonWorkflowsCallableFactory());
}
/**
 * Constructs an instance of HttpJsonWorkflowsStub, using the given settings. This is protected so
 * that it is easy to make a subclass, but otherwise, the static factory methods should be
 * preferred.
 */
protected HttpJsonWorkflowsStub(
    WorkflowsStubSettings settings,
    ClientContext clientContext,
    HttpJsonStubCallableFactory callableFactory)
    throws IOException {
  this.callableFactory = callableFactory;
  // Operations stub used to poll/cancel long-running operations, with explicit HTTP
  // bindings for the LRO mixin methods under this service's /v1 surface.
  this.httpJsonOperationsStub =
      HttpJsonOperationsStub.create(
          clientContext,
          callableFactory,
          typeRegistry,
          ImmutableMap.<String, HttpRule>builder()
              .put(
                  "google.longrunning.Operations.DeleteOperation",
                  HttpRule.newBuilder()
                      .setDelete("/v1/{name=projects/*/locations/*/operations/*}")
                      .build())
              .put(
                  "google.longrunning.Operations.GetOperation",
                  HttpRule.newBuilder()
                      .setGet("/v1/{name=projects/*/locations/*/operations/*}")
                      .build())
              .put(
                  "google.longrunning.Operations.ListOperations",
                  HttpRule.newBuilder()
                      .setGet("/v1/{name=projects/*/locations/*}/operations")
                      .build())
              .build());
  // Per-RPC transport settings: each pairs a method descriptor with a params extractor
  // that populates routing headers (e.g. "parent" or "name") from the request.
  HttpJsonCallSettings<ListWorkflowsRequest, ListWorkflowsResponse>
      listWorkflowsTransportSettings =
          HttpJsonCallSettings.<ListWorkflowsRequest, ListWorkflowsResponse>newBuilder()
              .setMethodDescriptor(listWorkflowsMethodDescriptor)
              .setTypeRegistry(typeRegistry)
              .setParamsExtractor(
                  request -> {
                    RequestParamsBuilder builder = RequestParamsBuilder.create();
                    builder.add("parent", String.valueOf(request.getParent()));
                    return builder.build();
                  })
              .build();
  HttpJsonCallSettings<GetWorkflowRequest, Workflow> getWorkflowTransportSettings =
      HttpJsonCallSettings.<GetWorkflowRequest, Workflow>newBuilder()
          .setMethodDescriptor(getWorkflowMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("name", String.valueOf(request.getName()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<CreateWorkflowRequest, Operation> createWorkflowTransportSettings =
      HttpJsonCallSettings.<CreateWorkflowRequest, Operation>newBuilder()
          .setMethodDescriptor(createWorkflowMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("parent", String.valueOf(request.getParent()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<DeleteWorkflowRequest, Operation> deleteWorkflowTransportSettings =
      HttpJsonCallSettings.<DeleteWorkflowRequest, Operation>newBuilder()
          .setMethodDescriptor(deleteWorkflowMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("name", String.valueOf(request.getName()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<UpdateWorkflowRequest, Operation> updateWorkflowTransportSettings =
      HttpJsonCallSettings.<UpdateWorkflowRequest, Operation>newBuilder()
          .setMethodDescriptor(updateWorkflowMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                // Routing key comes from the nested resource name on the Workflow message.
                builder.add("workflow.name", String.valueOf(request.getWorkflow().getName()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<ListWorkflowRevisionsRequest, ListWorkflowRevisionsResponse>
      listWorkflowRevisionsTransportSettings =
          HttpJsonCallSettings
              .<ListWorkflowRevisionsRequest, ListWorkflowRevisionsResponse>newBuilder()
              .setMethodDescriptor(listWorkflowRevisionsMethodDescriptor)
              .setTypeRegistry(typeRegistry)
              .setParamsExtractor(
                  request -> {
                    RequestParamsBuilder builder = RequestParamsBuilder.create();
                    builder.add("name", String.valueOf(request.getName()));
                    return builder.build();
                  })
              .build();
  HttpJsonCallSettings<ListLocationsRequest, ListLocationsResponse>
      listLocationsTransportSettings =
          HttpJsonCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder()
              .setMethodDescriptor(listLocationsMethodDescriptor)
              .setTypeRegistry(typeRegistry)
              .setParamsExtractor(
                  request -> {
                    RequestParamsBuilder builder = RequestParamsBuilder.create();
                    builder.add("name", String.valueOf(request.getName()));
                    return builder.build();
                  })
              .build();
  HttpJsonCallSettings<GetLocationRequest, Location> getLocationTransportSettings =
      HttpJsonCallSettings.<GetLocationRequest, Location>newBuilder()
          .setMethodDescriptor(getLocationMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("name", String.valueOf(request.getName()));
                return builder.build();
              })
          .build();
  // Wire each transport setting plus its retry/timeout settings into concrete callables.
  this.listWorkflowsCallable =
      callableFactory.createUnaryCallable(
          listWorkflowsTransportSettings, settings.listWorkflowsSettings(), clientContext);
  this.listWorkflowsPagedCallable =
      callableFactory.createPagedCallable(
          listWorkflowsTransportSettings, settings.listWorkflowsSettings(), clientContext);
  this.getWorkflowCallable =
      callableFactory.createUnaryCallable(
          getWorkflowTransportSettings, settings.getWorkflowSettings(), clientContext);
  this.createWorkflowCallable =
      callableFactory.createUnaryCallable(
          createWorkflowTransportSettings, settings.createWorkflowSettings(), clientContext);
  this.createWorkflowOperationCallable =
      callableFactory.createOperationCallable(
          createWorkflowTransportSettings,
          settings.createWorkflowOperationSettings(),
          clientContext,
          httpJsonOperationsStub);
  this.deleteWorkflowCallable =
      callableFactory.createUnaryCallable(
          deleteWorkflowTransportSettings, settings.deleteWorkflowSettings(), clientContext);
  this.deleteWorkflowOperationCallable =
      callableFactory.createOperationCallable(
          deleteWorkflowTransportSettings,
          settings.deleteWorkflowOperationSettings(),
          clientContext,
          httpJsonOperationsStub);
  this.updateWorkflowCallable =
      callableFactory.createUnaryCallable(
          updateWorkflowTransportSettings, settings.updateWorkflowSettings(), clientContext);
  this.updateWorkflowOperationCallable =
      callableFactory.createOperationCallable(
          updateWorkflowTransportSettings,
          settings.updateWorkflowOperationSettings(),
          clientContext,
          httpJsonOperationsStub);
  this.listWorkflowRevisionsCallable =
      callableFactory.createUnaryCallable(
          listWorkflowRevisionsTransportSettings,
          settings.listWorkflowRevisionsSettings(),
          clientContext);
  this.listWorkflowRevisionsPagedCallable =
      callableFactory.createPagedCallable(
          listWorkflowRevisionsTransportSettings,
          settings.listWorkflowRevisionsSettings(),
          clientContext);
  this.listLocationsCallable =
      callableFactory.createUnaryCallable(
          listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
  this.listLocationsPagedCallable =
      callableFactory.createPagedCallable(
          listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
  this.getLocationCallable =
      callableFactory.createUnaryCallable(
          getLocationTransportSettings, settings.getLocationSettings(), clientContext);
  // Single handle used by close()/shutdown() to release everything at once.
  this.backgroundResources =
      new BackgroundResourceAggregation(clientContext.getBackgroundResources());
}
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
  // Every REST method descriptor this stub defines, for internal tooling/tests.
  List<ApiMethodDescriptor> descriptors = new ArrayList<>();
  descriptors.add(listWorkflowsMethodDescriptor);
  descriptors.add(getWorkflowMethodDescriptor);
  descriptors.add(createWorkflowMethodDescriptor);
  descriptors.add(deleteWorkflowMethodDescriptor);
  descriptors.add(updateWorkflowMethodDescriptor);
  descriptors.add(listWorkflowRevisionsMethodDescriptor);
  descriptors.add(listLocationsMethodDescriptor);
  descriptors.add(getLocationMethodDescriptor);
  return descriptors;
}
  /** Returns the shared operations stub used by the long-running-operation callables above. */
  public HttpJsonOperationsStub getHttpJsonOperationsStub() {
    return httpJsonOperationsStub;
  }
  /** Returns the unary callable backing the ListWorkflows RPC. */
  @Override
  public UnaryCallable<ListWorkflowsRequest, ListWorkflowsResponse> listWorkflowsCallable() {
    return listWorkflowsCallable;
  }
  /** Returns the page-aware variant of the ListWorkflows callable. */
  @Override
  public UnaryCallable<ListWorkflowsRequest, ListWorkflowsPagedResponse>
      listWorkflowsPagedCallable() {
    return listWorkflowsPagedCallable;
  }
  /** Returns the unary callable backing the GetWorkflow RPC. */
  @Override
  public UnaryCallable<GetWorkflowRequest, Workflow> getWorkflowCallable() {
    return getWorkflowCallable;
  }
  /** Returns the unary callable for CreateWorkflow; yields the raw long-running Operation. */
  @Override
  public UnaryCallable<CreateWorkflowRequest, Operation> createWorkflowCallable() {
    return createWorkflowCallable;
  }
  /** Returns the operation callable for CreateWorkflow; resolves the LRO to a Workflow. */
  @Override
  public OperationCallable<CreateWorkflowRequest, Workflow, OperationMetadata>
      createWorkflowOperationCallable() {
    return createWorkflowOperationCallable;
  }
  /** Returns the unary callable for DeleteWorkflow; yields the raw long-running Operation. */
  @Override
  public UnaryCallable<DeleteWorkflowRequest, Operation> deleteWorkflowCallable() {
    return deleteWorkflowCallable;
  }
  /** Returns the operation callable for DeleteWorkflow; the LRO completes with Empty. */
  @Override
  public OperationCallable<DeleteWorkflowRequest, Empty, OperationMetadata>
      deleteWorkflowOperationCallable() {
    return deleteWorkflowOperationCallable;
  }
  /** Returns the unary callable for UpdateWorkflow; yields the raw long-running Operation. */
  @Override
  public UnaryCallable<UpdateWorkflowRequest, Operation> updateWorkflowCallable() {
    return updateWorkflowCallable;
  }
  /** Returns the operation callable for UpdateWorkflow; resolves the LRO to a Workflow. */
  @Override
  public OperationCallable<UpdateWorkflowRequest, Workflow, OperationMetadata>
      updateWorkflowOperationCallable() {
    return updateWorkflowOperationCallable;
  }
  /** Returns the unary callable backing the ListWorkflowRevisions RPC. */
  @Override
  public UnaryCallable<ListWorkflowRevisionsRequest, ListWorkflowRevisionsResponse>
      listWorkflowRevisionsCallable() {
    return listWorkflowRevisionsCallable;
  }
  /** Returns the page-aware variant of the ListWorkflowRevisions callable. */
  @Override
  public UnaryCallable<ListWorkflowRevisionsRequest, ListWorkflowRevisionsPagedResponse>
      listWorkflowRevisionsPagedCallable() {
    return listWorkflowRevisionsPagedCallable;
  }
  /** Returns the unary callable backing the ListLocations RPC. */
  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
    return listLocationsCallable;
  }
  /** Returns the page-aware variant of the ListLocations callable. */
  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable() {
    return listLocationsPagedCallable;
  }
  /** Returns the unary callable backing the GetLocation RPC. */
  @Override
  public UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
    return getLocationCallable;
  }
  /**
   * Closes the aggregated background resources. Runtime exceptions propagate unchanged;
   * any checked exception is rewrapped as IllegalStateException with the original cause.
   */
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }
  /** Initiates an orderly shutdown of the background resources. */
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }
  /** Reports whether shutdown has been initiated on the background resources. */
  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }
  /** Reports whether the background resources have fully terminated. */
  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }
  /** Forces an immediate shutdown of the background resources. */
  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }
  /** Blocks up to the given duration for termination; delegates to the background resources. */
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
|
googleapis/google-cloud-java | 36,076 | java-managedkafka/proto-google-cloud-managedkafka-v1/src/main/java/com/google/cloud/managedkafka/v1/ListAclsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/managedkafka/v1/managed_kafka.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.managedkafka.v1;
/**
*
*
* <pre>
* Response for ListAcls.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.ListAclsResponse}
*/
public final class ListAclsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.managedkafka.v1.ListAclsResponse)
ListAclsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListAclsResponse.newBuilder() to construct.
private ListAclsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListAclsResponse() {
acls_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListAclsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_ListAclsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_ListAclsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.managedkafka.v1.ListAclsResponse.class,
com.google.cloud.managedkafka.v1.ListAclsResponse.Builder.class);
}
public static final int ACLS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.managedkafka.v1.Acl> acls_;
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.managedkafka.v1.Acl> getAclsList() {
return acls_;
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.managedkafka.v1.AclOrBuilder>
getAclsOrBuilderList() {
return acls_;
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
@java.lang.Override
public int getAclsCount() {
return acls_.size();
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
@java.lang.Override
public com.google.cloud.managedkafka.v1.Acl getAcls(int index) {
return acls_.get(index);
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
@java.lang.Override
public com.google.cloud.managedkafka.v1.AclOrBuilder getAclsOrBuilder(int index) {
return acls_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
  /**
   * Always true for this message (no required fields). The result is memoized in
   * {@code memoizedIsInitialized}: -1 = not computed, 0 = false, 1 = true.
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message: each acl as field 1, next_page_token as field 2 (skipped when
   * empty, per proto3 default-value rules), then any retained unknown fields.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < acls_.size(); i++) {
      output.writeMessage(1, acls_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes the serialized byte size mirroring {@link #writeTo}. The result is cached in
   * {@code memoizedSize}; -1 marks "not yet computed".
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < acls_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, acls_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Field-wise equality: compares the acls list, next_page_token, and unknown fields.
   * Non-ListAclsResponse arguments fall through to the superclass comparison.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.managedkafka.v1.ListAclsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.managedkafka.v1.ListAclsResponse other =
        (com.google.cloud.managedkafka.v1.ListAclsResponse) obj;
    if (!getAclsList().equals(other.getAclsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash code consistent with {@link #equals}: mixes the descriptor, the acls list (only
   * when non-empty, matching the equality of empty lists), next_page_token, and unknown
   * fields. Memoized in {@code memoizedHashCode}; 0 marks "not yet computed".
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getAclsCount() > 0) {
      hash = (37 * hash) + ACLS_FIELD_NUMBER;
      hash = (53 * hash) + getAclsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.managedkafka.v1.ListAclsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.ListAclsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.ListAclsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.ListAclsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.ListAclsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.ListAclsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.ListAclsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.ListAclsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.ListAclsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.ListAclsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.ListAclsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.ListAclsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.managedkafka.v1.ListAclsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response for ListAcls.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.ListAclsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.managedkafka.v1.ListAclsResponse)
com.google.cloud.managedkafka.v1.ListAclsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_ListAclsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_ListAclsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.managedkafka.v1.ListAclsResponse.class,
com.google.cloud.managedkafka.v1.ListAclsResponse.Builder.class);
}
// Construct using com.google.cloud.managedkafka.v1.ListAclsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (aclsBuilder_ == null) {
acls_ = java.util.Collections.emptyList();
} else {
acls_ = null;
aclsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_ListAclsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.ListAclsResponse getDefaultInstanceForType() {
return com.google.cloud.managedkafka.v1.ListAclsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.ListAclsResponse build() {
com.google.cloud.managedkafka.v1.ListAclsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.ListAclsResponse buildPartial() {
com.google.cloud.managedkafka.v1.ListAclsResponse result =
new com.google.cloud.managedkafka.v1.ListAclsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.managedkafka.v1.ListAclsResponse result) {
if (aclsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
acls_ = java.util.Collections.unmodifiableList(acls_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.acls_ = acls_;
} else {
result.acls_ = aclsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.managedkafka.v1.ListAclsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.managedkafka.v1.ListAclsResponse) {
return mergeFrom((com.google.cloud.managedkafka.v1.ListAclsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.managedkafka.v1.ListAclsResponse other) {
if (other == com.google.cloud.managedkafka.v1.ListAclsResponse.getDefaultInstance())
return this;
if (aclsBuilder_ == null) {
if (!other.acls_.isEmpty()) {
if (acls_.isEmpty()) {
acls_ = other.acls_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAclsIsMutable();
acls_.addAll(other.acls_);
}
onChanged();
}
} else {
if (!other.acls_.isEmpty()) {
if (aclsBuilder_.isEmpty()) {
aclsBuilder_.dispose();
aclsBuilder_ = null;
acls_ = other.acls_;
bitField0_ = (bitField0_ & ~0x00000001);
aclsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getAclsFieldBuilder()
: null;
} else {
aclsBuilder_.addAllMessages(other.acls_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.managedkafka.v1.Acl m =
input.readMessage(
com.google.cloud.managedkafka.v1.Acl.parser(), extensionRegistry);
if (aclsBuilder_ == null) {
ensureAclsIsMutable();
acls_.add(m);
} else {
aclsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.managedkafka.v1.Acl> acls_ =
java.util.Collections.emptyList();
private void ensureAclsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
acls_ = new java.util.ArrayList<com.google.cloud.managedkafka.v1.Acl>(acls_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.managedkafka.v1.Acl,
com.google.cloud.managedkafka.v1.Acl.Builder,
com.google.cloud.managedkafka.v1.AclOrBuilder>
aclsBuilder_;
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public java.util.List<com.google.cloud.managedkafka.v1.Acl> getAclsList() {
if (aclsBuilder_ == null) {
return java.util.Collections.unmodifiableList(acls_);
} else {
return aclsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public int getAclsCount() {
if (aclsBuilder_ == null) {
return acls_.size();
} else {
return aclsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public com.google.cloud.managedkafka.v1.Acl getAcls(int index) {
if (aclsBuilder_ == null) {
return acls_.get(index);
} else {
return aclsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public Builder setAcls(int index, com.google.cloud.managedkafka.v1.Acl value) {
if (aclsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAclsIsMutable();
acls_.set(index, value);
onChanged();
} else {
aclsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public Builder setAcls(
int index, com.google.cloud.managedkafka.v1.Acl.Builder builderForValue) {
if (aclsBuilder_ == null) {
ensureAclsIsMutable();
acls_.set(index, builderForValue.build());
onChanged();
} else {
aclsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public Builder addAcls(com.google.cloud.managedkafka.v1.Acl value) {
if (aclsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAclsIsMutable();
acls_.add(value);
onChanged();
} else {
aclsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public Builder addAcls(int index, com.google.cloud.managedkafka.v1.Acl value) {
if (aclsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAclsIsMutable();
acls_.add(index, value);
onChanged();
} else {
aclsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public Builder addAcls(com.google.cloud.managedkafka.v1.Acl.Builder builderForValue) {
if (aclsBuilder_ == null) {
ensureAclsIsMutable();
acls_.add(builderForValue.build());
onChanged();
} else {
aclsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public Builder addAcls(
int index, com.google.cloud.managedkafka.v1.Acl.Builder builderForValue) {
if (aclsBuilder_ == null) {
ensureAclsIsMutable();
acls_.add(index, builderForValue.build());
onChanged();
} else {
aclsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public Builder addAllAcls(
java.lang.Iterable<? extends com.google.cloud.managedkafka.v1.Acl> values) {
if (aclsBuilder_ == null) {
ensureAclsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, acls_);
onChanged();
} else {
aclsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public Builder clearAcls() {
if (aclsBuilder_ == null) {
acls_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
aclsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public Builder removeAcls(int index) {
if (aclsBuilder_ == null) {
ensureAclsIsMutable();
acls_.remove(index);
onChanged();
} else {
aclsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public com.google.cloud.managedkafka.v1.Acl.Builder getAclsBuilder(int index) {
return getAclsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public com.google.cloud.managedkafka.v1.AclOrBuilder getAclsOrBuilder(int index) {
if (aclsBuilder_ == null) {
return acls_.get(index);
} else {
return aclsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public java.util.List<? extends com.google.cloud.managedkafka.v1.AclOrBuilder>
getAclsOrBuilderList() {
if (aclsBuilder_ != null) {
return aclsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(acls_);
}
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public com.google.cloud.managedkafka.v1.Acl.Builder addAclsBuilder() {
return getAclsFieldBuilder()
.addBuilder(com.google.cloud.managedkafka.v1.Acl.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public com.google.cloud.managedkafka.v1.Acl.Builder addAclsBuilder(int index) {
return getAclsFieldBuilder()
.addBuilder(index, com.google.cloud.managedkafka.v1.Acl.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of acls in the requested parent. The order of the acls is
* unspecified.
* </pre>
*
* <code>repeated .google.cloud.managedkafka.v1.Acl acls = 1;</code>
*/
public java.util.List<com.google.cloud.managedkafka.v1.Acl.Builder> getAclsBuilderList() {
return getAclsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.managedkafka.v1.Acl,
com.google.cloud.managedkafka.v1.Acl.Builder,
com.google.cloud.managedkafka.v1.AclOrBuilder>
getAclsFieldBuilder() {
if (aclsBuilder_ == null) {
aclsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.managedkafka.v1.Acl,
com.google.cloud.managedkafka.v1.Acl.Builder,
com.google.cloud.managedkafka.v1.AclOrBuilder>(
acls_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
acls_ = null;
}
return aclsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page of
* results. If this field is omitted, there are no more results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.managedkafka.v1.ListAclsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.managedkafka.v1.ListAclsResponse)
private static final com.google.cloud.managedkafka.v1.ListAclsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.managedkafka.v1.ListAclsResponse();
}
public static com.google.cloud.managedkafka.v1.ListAclsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListAclsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListAclsResponse>() {
@java.lang.Override
public ListAclsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListAclsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListAclsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.ListAclsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,165 | java-chronicle/proto-google-cloud-chronicle-v1/src/main/java/com/google/cloud/chronicle/v1/UpdateRuleDeploymentRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/chronicle/v1/rule.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.chronicle.v1;
/**
*
*
* <pre>
* Request message for UpdateRuleDeployment.
* </pre>
*
* Protobuf type {@code google.cloud.chronicle.v1.UpdateRuleDeploymentRequest}
*/
public final class UpdateRuleDeploymentRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.chronicle.v1.UpdateRuleDeploymentRequest)
UpdateRuleDeploymentRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateRuleDeploymentRequest.newBuilder() to construct.
private UpdateRuleDeploymentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateRuleDeploymentRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateRuleDeploymentRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.chronicle.v1.RuleProto
.internal_static_google_cloud_chronicle_v1_UpdateRuleDeploymentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.chronicle.v1.RuleProto
.internal_static_google_cloud_chronicle_v1_UpdateRuleDeploymentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest.class,
com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest.Builder.class);
}
private int bitField0_;
public static final int RULE_DEPLOYMENT_FIELD_NUMBER = 1;
private com.google.cloud.chronicle.v1.RuleDeployment ruleDeployment_;
/**
*
*
* <pre>
* Required. The rule deployment to update.
*
* The rule deployment's `name` field is used to identify the rule deployment
* to update. Format:
* `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}/deployment`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.RuleDeployment rule_deployment = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the ruleDeployment field is set.
*/
@java.lang.Override
public boolean hasRuleDeployment() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The rule deployment to update.
*
* The rule deployment's `name` field is used to identify the rule deployment
* to update. Format:
* `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}/deployment`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.RuleDeployment rule_deployment = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The ruleDeployment.
*/
@java.lang.Override
public com.google.cloud.chronicle.v1.RuleDeployment getRuleDeployment() {
return ruleDeployment_ == null
? com.google.cloud.chronicle.v1.RuleDeployment.getDefaultInstance()
: ruleDeployment_;
}
/**
*
*
* <pre>
* Required. The rule deployment to update.
*
* The rule deployment's `name` field is used to identify the rule deployment
* to update. Format:
* `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}/deployment`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.RuleDeployment rule_deployment = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.chronicle.v1.RuleDeploymentOrBuilder getRuleDeploymentOrBuilder() {
return ruleDeployment_ == null
? com.google.cloud.chronicle.v1.RuleDeployment.getDefaultInstance()
: ruleDeployment_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getRuleDeployment());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getRuleDeployment());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest)) {
return super.equals(obj);
}
com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest other =
(com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest) obj;
if (hasRuleDeployment() != other.hasRuleDeployment()) return false;
if (hasRuleDeployment()) {
if (!getRuleDeployment().equals(other.getRuleDeployment())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasRuleDeployment()) {
hash = (37 * hash) + RULE_DEPLOYMENT_FIELD_NUMBER;
hash = (53 * hash) + getRuleDeployment().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for UpdateRuleDeployment.
* </pre>
*
* Protobuf type {@code google.cloud.chronicle.v1.UpdateRuleDeploymentRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.chronicle.v1.UpdateRuleDeploymentRequest)
com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.chronicle.v1.RuleProto
.internal_static_google_cloud_chronicle_v1_UpdateRuleDeploymentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.chronicle.v1.RuleProto
.internal_static_google_cloud_chronicle_v1_UpdateRuleDeploymentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest.class,
com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest.Builder.class);
}
// Construct using com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getRuleDeploymentFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
ruleDeployment_ = null;
if (ruleDeploymentBuilder_ != null) {
ruleDeploymentBuilder_.dispose();
ruleDeploymentBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.chronicle.v1.RuleProto
.internal_static_google_cloud_chronicle_v1_UpdateRuleDeploymentRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest getDefaultInstanceForType() {
return com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest build() {
com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest buildPartial() {
com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest result =
new com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.ruleDeployment_ =
ruleDeploymentBuilder_ == null ? ruleDeployment_ : ruleDeploymentBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest) {
return mergeFrom((com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest other) {
if (other == com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest.getDefaultInstance())
return this;
if (other.hasRuleDeployment()) {
mergeRuleDeployment(other.getRuleDeployment());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getRuleDeploymentFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.chronicle.v1.RuleDeployment ruleDeployment_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.chronicle.v1.RuleDeployment,
com.google.cloud.chronicle.v1.RuleDeployment.Builder,
com.google.cloud.chronicle.v1.RuleDeploymentOrBuilder>
ruleDeploymentBuilder_;
/**
*
*
* <pre>
* Required. The rule deployment to update.
*
* The rule deployment's `name` field is used to identify the rule deployment
* to update. Format:
* `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}/deployment`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.RuleDeployment rule_deployment = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the ruleDeployment field is set.
*/
public boolean hasRuleDeployment() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The rule deployment to update.
*
* The rule deployment's `name` field is used to identify the rule deployment
* to update. Format:
* `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}/deployment`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.RuleDeployment rule_deployment = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The ruleDeployment.
*/
public com.google.cloud.chronicle.v1.RuleDeployment getRuleDeployment() {
if (ruleDeploymentBuilder_ == null) {
return ruleDeployment_ == null
? com.google.cloud.chronicle.v1.RuleDeployment.getDefaultInstance()
: ruleDeployment_;
} else {
return ruleDeploymentBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The rule deployment to update.
*
* The rule deployment's `name` field is used to identify the rule deployment
* to update. Format:
* `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}/deployment`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.RuleDeployment rule_deployment = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRuleDeployment(com.google.cloud.chronicle.v1.RuleDeployment value) {
if (ruleDeploymentBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ruleDeployment_ = value;
} else {
ruleDeploymentBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The rule deployment to update.
*
* The rule deployment's `name` field is used to identify the rule deployment
* to update. Format:
* `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}/deployment`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.RuleDeployment rule_deployment = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRuleDeployment(
com.google.cloud.chronicle.v1.RuleDeployment.Builder builderForValue) {
if (ruleDeploymentBuilder_ == null) {
ruleDeployment_ = builderForValue.build();
} else {
ruleDeploymentBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The rule deployment to update.
*
* The rule deployment's `name` field is used to identify the rule deployment
* to update. Format:
* `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}/deployment`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.RuleDeployment rule_deployment = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeRuleDeployment(com.google.cloud.chronicle.v1.RuleDeployment value) {
if (ruleDeploymentBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& ruleDeployment_ != null
&& ruleDeployment_
!= com.google.cloud.chronicle.v1.RuleDeployment.getDefaultInstance()) {
getRuleDeploymentBuilder().mergeFrom(value);
} else {
ruleDeployment_ = value;
}
} else {
ruleDeploymentBuilder_.mergeFrom(value);
}
if (ruleDeployment_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The rule deployment to update.
*
* The rule deployment's `name` field is used to identify the rule deployment
* to update. Format:
* `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}/deployment`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.RuleDeployment rule_deployment = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearRuleDeployment() {
bitField0_ = (bitField0_ & ~0x00000001);
ruleDeployment_ = null;
if (ruleDeploymentBuilder_ != null) {
ruleDeploymentBuilder_.dispose();
ruleDeploymentBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The rule deployment to update.
*
* The rule deployment's `name` field is used to identify the rule deployment
* to update. Format:
* `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}/deployment`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.RuleDeployment rule_deployment = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.chronicle.v1.RuleDeployment.Builder getRuleDeploymentBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getRuleDeploymentFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The rule deployment to update.
*
* The rule deployment's `name` field is used to identify the rule deployment
* to update. Format:
* `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}/deployment`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.RuleDeployment rule_deployment = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.chronicle.v1.RuleDeploymentOrBuilder getRuleDeploymentOrBuilder() {
if (ruleDeploymentBuilder_ != null) {
return ruleDeploymentBuilder_.getMessageOrBuilder();
} else {
return ruleDeployment_ == null
? com.google.cloud.chronicle.v1.RuleDeployment.getDefaultInstance()
: ruleDeployment_;
}
}
/**
*
*
* <pre>
* Required. The rule deployment to update.
*
* The rule deployment's `name` field is used to identify the rule deployment
* to update. Format:
* `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}/deployment`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.RuleDeployment rule_deployment = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.chronicle.v1.RuleDeployment,
com.google.cloud.chronicle.v1.RuleDeployment.Builder,
com.google.cloud.chronicle.v1.RuleDeploymentOrBuilder>
getRuleDeploymentFieldBuilder() {
if (ruleDeploymentBuilder_ == null) {
ruleDeploymentBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.chronicle.v1.RuleDeployment,
com.google.cloud.chronicle.v1.RuleDeployment.Builder,
com.google.cloud.chronicle.v1.RuleDeploymentOrBuilder>(
getRuleDeployment(), getParentForChildren(), isClean());
ruleDeployment_ = null;
}
return ruleDeploymentBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.chronicle.v1.UpdateRuleDeploymentRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.chronicle.v1.UpdateRuleDeploymentRequest)
private static final com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest();
}
public static com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateRuleDeploymentRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateRuleDeploymentRequest>() {
@java.lang.Override
public UpdateRuleDeploymentRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the message parser.
public static com.google.protobuf.Parser<UpdateRuleDeploymentRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateRuleDeploymentRequest> getParserForType() {
  // Instance-level accessor required by the Message interface; same singleton parser.
  return PARSER;
}
@java.lang.Override
public com.google.cloud.chronicle.v1.UpdateRuleDeploymentRequest getDefaultInstanceForType() {
  // Instance-level accessor required by the Message interface; same default instance.
  return DEFAULT_INSTANCE;
}
}
|
openjdk/nashorn | 36,403 | src/org.openjdk.nashorn/share/classes/org/openjdk/nashorn/internal/codegen/Lower.java | /*
* Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.openjdk.nashorn.internal.codegen;
import static org.openjdk.nashorn.internal.codegen.CompilerConstants.EVAL;
import static org.openjdk.nashorn.internal.codegen.CompilerConstants.RETURN;
import static org.openjdk.nashorn.internal.ir.Expression.isAlwaysTrue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.ListIterator;
import java.util.regex.Pattern;
import org.openjdk.nashorn.internal.ir.AccessNode;
import org.openjdk.nashorn.internal.ir.BaseNode;
import org.openjdk.nashorn.internal.ir.BinaryNode;
import org.openjdk.nashorn.internal.ir.Block;
import org.openjdk.nashorn.internal.ir.BlockLexicalContext;
import org.openjdk.nashorn.internal.ir.BlockStatement;
import org.openjdk.nashorn.internal.ir.BreakNode;
import org.openjdk.nashorn.internal.ir.CallNode;
import org.openjdk.nashorn.internal.ir.CaseNode;
import org.openjdk.nashorn.internal.ir.CatchNode;
import org.openjdk.nashorn.internal.ir.ClassNode;
import org.openjdk.nashorn.internal.ir.ContinueNode;
import org.openjdk.nashorn.internal.ir.DebuggerNode;
import org.openjdk.nashorn.internal.ir.EmptyNode;
import org.openjdk.nashorn.internal.ir.Expression;
import org.openjdk.nashorn.internal.ir.ExpressionStatement;
import org.openjdk.nashorn.internal.ir.ForNode;
import org.openjdk.nashorn.internal.ir.FunctionNode;
import org.openjdk.nashorn.internal.ir.IdentNode;
import org.openjdk.nashorn.internal.ir.IfNode;
import org.openjdk.nashorn.internal.ir.IndexNode;
import org.openjdk.nashorn.internal.ir.JumpStatement;
import org.openjdk.nashorn.internal.ir.JumpToInlinedFinally;
import org.openjdk.nashorn.internal.ir.LabelNode;
import org.openjdk.nashorn.internal.ir.LexicalContext;
import org.openjdk.nashorn.internal.ir.LiteralNode;
import org.openjdk.nashorn.internal.ir.LiteralNode.ArrayLiteralNode;
import org.openjdk.nashorn.internal.ir.LiteralNode.PrimitiveLiteralNode;
import org.openjdk.nashorn.internal.ir.LoopNode;
import org.openjdk.nashorn.internal.ir.Node;
import org.openjdk.nashorn.internal.ir.ObjectNode;
import org.openjdk.nashorn.internal.ir.ReturnNode;
import org.openjdk.nashorn.internal.ir.RuntimeNode;
import org.openjdk.nashorn.internal.ir.Statement;
import org.openjdk.nashorn.internal.ir.SwitchNode;
import org.openjdk.nashorn.internal.ir.Symbol;
import org.openjdk.nashorn.internal.ir.ThrowNode;
import org.openjdk.nashorn.internal.ir.TryNode;
import org.openjdk.nashorn.internal.ir.UnaryNode;
import org.openjdk.nashorn.internal.ir.VarNode;
import org.openjdk.nashorn.internal.ir.WhileNode;
import org.openjdk.nashorn.internal.ir.WithNode;
import org.openjdk.nashorn.internal.ir.visitor.NodeOperatorVisitor;
import org.openjdk.nashorn.internal.ir.visitor.SimpleNodeVisitor;
import org.openjdk.nashorn.internal.parser.Token;
import org.openjdk.nashorn.internal.parser.TokenType;
import org.openjdk.nashorn.internal.runtime.Context;
import org.openjdk.nashorn.internal.runtime.ECMAErrors;
import org.openjdk.nashorn.internal.runtime.ErrorManager;
import org.openjdk.nashorn.internal.runtime.JSType;
import org.openjdk.nashorn.internal.runtime.Source;
import org.openjdk.nashorn.internal.runtime.logging.DebugLogger;
import org.openjdk.nashorn.internal.runtime.logging.Loggable;
import org.openjdk.nashorn.internal.runtime.logging.Logger;
/**
* Lower to more primitive operations. After lowering, an AST still has no symbols
* and types, but several nodes have been turned into more low level constructs
* and control flow termination criteria have been computed.
*
* We do things like code copying/inlining of finallies here, as it is much
* harder and context dependent to do any code copying after symbols have been
* finalized.
*/
@Logger(name="lower")
final class Lower extends NodeOperatorVisitor<BlockLexicalContext> implements Loggable {
    /** Logger for this compilation phase (tagged "lower"). */
    private final DebugLogger log;
    /** True when the script environment runs in ES6 mode; gates the "not implemented yet" checks below. */
    private final boolean es6;
    /** Source being compiled; used to compute line/column positions for error messages. */
    private final Source source;
    // Conservative pattern to test if element names consist of characters valid for identifiers.
    // This matches any non-zero length alphanumeric string including _ and $ and not starting with a digit.
    private static final Pattern SAFE_PROPERTY_NAME = Pattern.compile("[a-zA-Z_$][\\w$]*");
    /**
     * Constructor.
     */
    Lower(final Compiler compiler) {
        super(new BlockLexicalContext() {
            @Override
            public List<Statement> popStatements() {
                // Truncate the statement list at the first terminal or jump statement.
                // Statements after that point are dead code, but their VarNodes still
                // declare hoisted symbols and are preserved (without initializers) by
                // FoldConstants.extractVarNodesFromDeadCode.
                final List<Statement> newStatements = new ArrayList<>();
                boolean terminated = false;
                final List<Statement> statements = super.popStatements();
                for (final Statement statement : statements) {
                    if (!terminated) {
                        newStatements.add(statement);
                        if (statement.isTerminal() || statement instanceof JumpStatement) { //TODO hasGoto? But some Loops are hasGoto too - why?
                            terminated = true;
                        }
                    } else {
                        FoldConstants.extractVarNodesFromDeadCode(statement, newStatements);
                    }
                }
                return newStatements;
            }
            @Override
            protected Block afterSetStatements(final Block block) {
                final List<Statement> stmts = block.getStatements();
                for(final ListIterator<Statement> li = stmts.listIterator(stmts.size()); li.hasPrevious();) {
                    final Statement stmt = li.previous();
                    // popStatements() guarantees that the only thing after a terminal statement are uninitialized
                    // VarNodes. We skip past those, and set the terminal state of the block to the value of the
                    // terminal state of the first statement that is not an uninitialized VarNode.
                    if(!(stmt instanceof VarNode && ((VarNode)stmt).getInit() == null)) {
                        return block.setIsTerminal(this, stmt.isTerminal());
                    }
                }
                return block.setIsTerminal(this, false);
            }
        });
        this.log = initLogger(compiler.getContext());
        this.es6 = compiler.getScriptEnvironment()._es6;
        this.source = compiler.getSource();
    }
    @Override
    public DebugLogger getLogger() {
        return log;
    }
    @Override
    public DebugLogger initLogger(final Context context) {
        return context.getLogger(this.getClass());
    }
    @Override
    public boolean enterBreakNode(final BreakNode breakNode) {
        // Jump statements are appended as-is; they have no children to lower.
        addStatement(breakNode);
        return false;
    }
    @Override
    public Node leaveCallNode(final CallNode callNode) {
        // Tag the callee as a call target (for fast scope calls) and detect potential eval calls.
        return checkEval(callNode.setFunction(markerFunction(callNode.getFunction())));
    }
    @Override
    public boolean enterCatchNode(final CatchNode catchNode) {
        // ES6 destructuring patterns in catch parameters are not supported.
        Expression exception = catchNode.getException();
        if ((exception != null) && !(exception instanceof IdentNode)) {
            throwNotImplementedYet("es6.destructuring", exception);
        }
        return true;
    }
    @Override
    public Node leaveCatchNode(final CatchNode catchNode) {
        return addStatement(catchNode);
    }
    @Override
    public boolean enterContinueNode(final ContinueNode continueNode) {
        // Jump statements are appended as-is; they have no children to lower.
        addStatement(continueNode);
        return false;
    }
    @Override
    public boolean enterDebuggerNode(final DebuggerNode debuggerNode) {
        // Lower "debugger;" into an expression statement invoking the DEBUGGER runtime request.
        final int line = debuggerNode.getLineNumber();
        final long token = debuggerNode.getToken();
        final int finish = debuggerNode.getFinish();
        addStatement(new ExpressionStatement(line, token, finish, new RuntimeNode(token, finish, RuntimeNode.Request.DEBUGGER, new ArrayList<Expression>())));
        return false;
    }
    @Override
    public boolean enterJumpToInlinedFinally(final JumpToInlinedFinally jumpToInlinedFinally) {
        addStatement(jumpToInlinedFinally);
        return false;
    }
    @Override
    public boolean enterEmptyNode(final EmptyNode emptyNode) {
        // Empty statements are discarded entirely (never added to the block).
        return false;
    }
    @Override
    public Node leaveIndexNode(final IndexNode indexNode) {
        final String name = getConstantPropertyName(indexNode.getIndex());
        if (name != null) {
            // If index node is a constant property name convert index node to access node.
            assert indexNode.isIndex();
            return new AccessNode(indexNode.getToken(), indexNode.getFinish(), indexNode.getBase(), name);
        }
        return super.leaveIndexNode(indexNode);
    }
    @Override
    public Node leaveDELETE(final UnaryNode delete) {
        final Expression expression = delete.getExpression();
        if (expression instanceof IdentNode || expression instanceof BaseNode) {
            return delete;
        }
        // delete of a non-reference (not an identifier or property access): evaluate the
        // expression for its side effects only, then produce the literal true.
        return new BinaryNode(Token.recast(delete.getToken(), TokenType.COMMARIGHT), expression,
                LiteralNode.newInstance(delete.getToken(), delete.getFinish(), true));
    }
    // If the expression is a primitive literal whose value is a safe identifier-like string
    // (SAFE_PROPERTY_NAME never matches an all-digit array index), return that string; else null.
    private static String getConstantPropertyName(final Expression expression) {
        if (expression instanceof LiteralNode.PrimitiveLiteralNode) {
            final Object value = ((LiteralNode) expression).getValue();
            if (value instanceof String && SAFE_PROPERTY_NAME.matcher((String) value).matches()) {
                return (String) value;
            }
        }
        return null;
    }
    @Override
    public Node leaveExpressionStatement(final ExpressionStatement expressionStatement) {
        final Expression expr = expressionStatement.getExpression();
        ExpressionStatement node = expressionStatement;
        final FunctionNode currentFunction = lc.getCurrentFunction();
        if (currentFunction.isProgram()) {
            // At program (global) level, rewrite "expr;" into ":return = expr;" so the
            // script's last evaluated expression value is available as the result.
            // Internal expressions and existing :return assignments are left alone.
            if (!isInternalExpression(expr) && !isEvalResultAssignment(expr)) {
                node = expressionStatement.setExpression(
                        new BinaryNode(
                                Token.recast(
                                        expressionStatement.getToken(),
                                        TokenType.ASSIGN),
                                compilerConstant(RETURN),
                                expr));
            }
        }
        if (es6 && expressionStatement.destructuringDeclarationType() != null) {
            throwNotImplementedYet("es6.destructuring", expressionStatement);
        }
        return addStatement(node);
    }
    @Override
    public Node leaveBlockStatement(final BlockStatement blockStatement) {
        return addStatement(blockStatement);
    }
    @Override
    public boolean enterForNode(final ForNode forNode) {
        // ES6 destructuring in for-loop init is not supported.
        if (es6 && (forNode.getInit() instanceof ObjectNode || forNode.getInit() instanceof ArrayLiteralNode)) {
            throwNotImplementedYet("es6.destructuring", forNode);
        }
        return super.enterForNode(forNode);
    }
    @Override
    public Node leaveForNode(final ForNode forNode) {
        ForNode newForNode = forNode;
        // An always-true test is equivalent to no test at all; drop it.
        final Expression test = forNode.getTest();
        if (!forNode.isForInOrOf() && isAlwaysTrue(test)) {
            newForNode = forNode.setTest(lc, null);
        }
        newForNode = checkEscape(newForNode);
        if(!es6 && newForNode.isForInOrOf()) {
            // Wrap it in a block so its internally created iterator is restricted in scope, unless we are running
            // in ES6 mode, in which case the parser already created a block to capture let/const declarations.
            addStatementEnclosedInBlock(newForNode);
        } else {
            addStatement(newForNode);
        }
        return newForNode;
    }
    @Override
    public boolean enterFunctionNode(final FunctionNode functionNode) {
        // Reject ES6 features that this backend does not implement yet.
        if (es6) {
            if (functionNode.getKind() == FunctionNode.Kind.MODULE) {
                throwNotImplementedYet("es6.module", functionNode);
            }
            if (functionNode.getKind() == FunctionNode.Kind.GENERATOR) {
                throwNotImplementedYet("es6.generator", functionNode);
            }
            if (functionNode.usesSuper()) {
                throwNotImplementedYet("es6.super", functionNode);
            }
            final int numParams = functionNode.getNumOfParams();
            if (numParams > 0) {
                final IdentNode lastParam = functionNode.getParameter(numParams - 1);
                if (lastParam.isRestParameter()) {
                    throwNotImplementedYet("es6.rest.param", lastParam);
                }
            }
            for (final IdentNode param : functionNode.getParameters()) {
                if (param.isDestructuredParameter()) {
                    throwNotImplementedYet("es6.destructuring", functionNode);
                }
            }
        }
        return super.enterFunctionNode(functionNode);
    }
    @Override
    public Node leaveFunctionNode(final FunctionNode functionNode) {
        log.info("END FunctionNode: ", functionNode.getName());
        return functionNode;
    }
    @Override
    public Node leaveIfNode(final IfNode ifNode) {
        return addStatement(ifNode);
    }
    @Override
    public Node leaveIN(final BinaryNode binaryNode) {
        // 'in' is lowered to a RuntimeNode, i.e. evaluated via a runtime call.
        return new RuntimeNode(binaryNode);
    }
    @Override
    public Node leaveINSTANCEOF(final BinaryNode binaryNode) {
        // 'instanceof' is lowered to a RuntimeNode, i.e. evaluated via a runtime call.
        return new RuntimeNode(binaryNode);
    }
    @Override
    public Node leaveLabelNode(final LabelNode labelNode) {
        return addStatement(labelNode);
    }
    @Override
    public Node leaveReturnNode(final ReturnNode returnNode) {
        addStatement(returnNode); //ReturnNodes are always terminal, marked as such in constructor
        return returnNode;
    }
    @Override
    public Node leaveCaseNode(final CaseNode caseNode) {
        // Try to represent the case test as an integer
        final Node test = caseNode.getTest();
        if (test instanceof LiteralNode) {
            final LiteralNode<?> lit = (LiteralNode<?>)test;
            if (lit.isNumeric() && !(lit.getValue() instanceof Integer)) {
                if (JSType.isRepresentableAsInt(lit.getNumber())) {
                    return caseNode.setTest((Expression)LiteralNode.newInstance(lit, lit.getInt32()).accept(this));
                }
            }
        }
        return caseNode;
    }
    @Override
    public Node leaveSwitchNode(final SwitchNode switchNode) {
        if(!switchNode.isUniqueInteger()) {
            // Wrap it in a block so its internally created tag is restricted in scope
            addStatementEnclosedInBlock(switchNode);
        } else {
            addStatement(switchNode);
        }
        return switchNode;
    }
    @Override
    public Node leaveThrowNode(final ThrowNode throwNode) {
        return addStatement(throwNode); //ThrowNodes are always terminal, marked as such in constructor
    }
    // Copy hygiene for duplicated code (e.g. inlined finally bodies): give nested
    // functions unique names and make all labels unique so duplicates do not clash.
    @SuppressWarnings("unchecked")
    private static <T extends Node> T ensureUniqueNamesIn(final T node) {
        return (T)node.accept(new SimpleNodeVisitor() {
            @Override
            public Node leaveFunctionNode(final FunctionNode functionNode) {
                final String name = functionNode.getName();
                return functionNode.setName(lc, lc.getCurrentFunction().uniqueName(name));
            }
            @Override
            public Node leaveDefault(final Node labelledNode) {
                return labelledNode.ensureUniqueLabels(lc);
            }
        });
    }
    // Copy the finally body, cutting it off after the first statement that has terminal flags
    // (anything beyond that would be unreachable).
    private static Block createFinallyBlock(final Block finallyBody) {
        final List<Statement> newStatements = new ArrayList<>();
        for (final Statement statement : finallyBody.getStatements()) {
            newStatements.add(statement);
            if (statement.hasTerminalFlags()) {
                break;
            }
        }
        return finallyBody.setStatements(null, newStatements);
    }
    // Synthesize a "catch (:e) { throw :e; }" block with a uniquely named exception symbol,
    // used as the catch-all that rethrows after spliced-in finally code.
    private Block catchAllBlock(final TryNode tryNode) {
        final int lineNumber = tryNode.getLineNumber();
        final long token = tryNode.getToken();
        final int finish = tryNode.getFinish();
        final IdentNode exception = new IdentNode(token, finish, lc.getCurrentFunction().uniqueName(CompilerConstants.EXCEPTION_PREFIX.symbolName()));
        final Block catchBody = new Block(token, finish, new ThrowNode(lineNumber, token, finish, new IdentNode(exception), true));
        assert catchBody.isTerminal(); //ends with throw, so terminal
        final CatchNode catchAllNode = new CatchNode(lineNumber, token, finish, new IdentNode(exception), null, catchBody, true);
        final Block catchAllBlock = new Block(token, finish, catchAllNode);
        //catchallblock -> catchallnode (catchnode) -> exception -> throw
        return (Block)catchAllBlock.accept(this); //not accepted. has to be accepted by lower
    }
    // Create an IdentNode referring to a compiler constant (e.g. :return) in the current function.
    private IdentNode compilerConstant(final CompilerConstants cc) {
        final FunctionNode functionNode = lc.getCurrentFunction();
        return new IdentNode(functionNode.getToken(), functionNode.getFinish(), cc.symbolName());
    }
    // A finally block is terminal if its last statement carries terminal flags.
    private static boolean isTerminalFinally(final Block finallyBlock) {
        return finallyBlock.getLastStatement().hasTerminalFlags();
    }
    /**
     * Splice finally code into all endpoints of a trynode
     * @param tryNode the try node
     * @param rethrow the rethrowing throw nodes from the synthetic catch block
     * @param finallyBody the code in the original finally block
     * @return new try node after splicing finally code (same if nop)
     */
    private TryNode spliceFinally(final TryNode tryNode, final ThrowNode rethrow, final Block finallyBody) {
        assert tryNode.getFinallyBody() == null;
        final Block finallyBlock = createFinallyBlock(finallyBody);
        final ArrayList<Block> inlinedFinallies = new ArrayList<>();
        final FunctionNode fn = lc.getCurrentFunction();
        final TryNode newTryNode = (TryNode)tryNode.accept(new SimpleNodeVisitor() {
            @Override
            public boolean enterFunctionNode(final FunctionNode functionNode) {
                // do not enter function nodes - finally code should not be inlined into them
                return false;
            }
            @Override
            public Node leaveThrowNode(final ThrowNode throwNode) {
                // The synthetic rethrow gets the finally code prepended before it rethrows.
                if (rethrow == throwNode) {
                    return new BlockStatement(prependFinally(finallyBlock, throwNode));
                }
                return throwNode;
            }
            @Override
            public Node leaveBreakNode(final BreakNode breakNode) {
                return leaveJumpStatement(breakNode);
            }
            @Override
            public Node leaveContinueNode(final ContinueNode continueNode) {
                return leaveJumpStatement(continueNode);
            }
            private Node leaveJumpStatement(final JumpStatement jump) {
                // NOTE: leaveJumpToInlinedFinally deliberately does not delegate to this method, only break and
                // continue are edited. JTIF nodes should not be changed, rather the surroundings of
                // break/continue/return that were moved into the inlined finally block itself will be changed.
                // If this visitor's lc doesn't find the target of the jump, it means it's external to the try block.
                if (jump.getTarget(lc) == null) {
                    return createJumpToInlinedFinally(fn, inlinedFinallies, prependFinally(finallyBlock, jump));
                }
                return jump;
            }
            @Override
            public Node leaveReturnNode(final ReturnNode returnNode) {
                final Expression expr = returnNode.getExpression();
                if (isTerminalFinally(finallyBlock)) {
                    if (expr == null) {
                        // Terminal finally; no return expression.
                        return createJumpToInlinedFinally(fn, inlinedFinallies, ensureUniqueNamesIn(finallyBlock));
                    }
                    // Terminal finally; has a return expression.
                    final List<Statement> newStatements = new ArrayList<>(2);
                    final int retLineNumber = returnNode.getLineNumber();
                    final long retToken = returnNode.getToken();
                    // Expression is evaluated for side effects.
                    newStatements.add(new ExpressionStatement(retLineNumber, retToken, returnNode.getFinish(), expr));
                    newStatements.add(createJumpToInlinedFinally(fn, inlinedFinallies, ensureUniqueNamesIn(finallyBlock)));
                    return new BlockStatement(retLineNumber, new Block(retToken, finallyBlock.getFinish(), newStatements));
                } else if (expr == null || expr instanceof PrimitiveLiteralNode<?> || (expr instanceof IdentNode && RETURN.symbolName().equals(((IdentNode)expr).getName()))) {
                    // Nonterminal finally; no return expression, or returns a primitive literal, or returns :return.
                    // Just move the return expression into the finally block.
                    return createJumpToInlinedFinally(fn, inlinedFinallies, prependFinally(finallyBlock, returnNode));
                } else {
                    // We need to evaluate the result of the return in case it is complex while still in the try block,
                    // store it in :return, and return it afterwards.
                    final List<Statement> newStatements = new ArrayList<>();
                    final int retLineNumber = returnNode.getLineNumber();
                    final long retToken = returnNode.getToken();
                    final int retFinish = returnNode.getFinish();
                    final Expression resultNode = new IdentNode(expr.getToken(), expr.getFinish(), RETURN.symbolName());
                    // ":return = <expr>;"
                    newStatements.add(new ExpressionStatement(retLineNumber, retToken, retFinish, new BinaryNode(Token.recast(returnNode.getToken(), TokenType.ASSIGN), resultNode, expr)));
                    // inline finally and end it with "return :return;"
                    newStatements.add(createJumpToInlinedFinally(fn, inlinedFinallies, prependFinally(finallyBlock, returnNode.setExpression(resultNode))));
                    return new BlockStatement(retLineNumber, new Block(retToken, retFinish, newStatements));
                }
            }
        });
        addStatement(inlinedFinallies.isEmpty() ? newTryNode : newTryNode.setInlinedFinallies(lc, inlinedFinallies));
        // TODO: if finallyStatement is terminal, we could just have sites of inlined finallies jump here.
        addStatement(new BlockStatement(finallyBlock));
        return newTryNode;
    }
    // Wrap the given finally code in a uniquely labeled block, record it as an inlined
    // finally, and return a jump targeting that label.
    private static JumpToInlinedFinally createJumpToInlinedFinally(final FunctionNode fn, final List<Block> inlinedFinallies, final Block finallyBlock) {
        final String labelName = fn.uniqueName(":finally");
        final long token = finallyBlock.getToken();
        final int finish = finallyBlock.getFinish();
        inlinedFinallies.add(new Block(token, finish, new LabelNode(finallyBlock.getFirstStatementLineNumber(),
                token, finish, labelName, finallyBlock)));
        return new JumpToInlinedFinally(labelName);
    }
    // Return a copy of the finally block with the given statement appended; if the finally
    // is terminal the statement would be unreachable, so only the copied finally is returned.
    private static Block prependFinally(final Block finallyBlock, final Statement statement) {
        final Block inlinedFinally = ensureUniqueNamesIn(finallyBlock);
        if (isTerminalFinally(finallyBlock)) {
            return inlinedFinally;
        }
        final List<Statement> stmts = inlinedFinally.getStatements();
        final List<Statement> newStmts = new ArrayList<>(stmts.size() + 1);
        newStmts.addAll(stmts);
        newStmts.add(statement);
        return new Block(inlinedFinally.getToken(), statement.getFinish(), newStmts);
    }
    @Override
    public Node leaveTryNode(final TryNode tryNode) {
        final Block finallyBody = tryNode.getFinallyBody();
        TryNode newTryNode = tryNode.setFinallyBody(lc, null);
        // No finally or empty finally
        if (finallyBody == null || finallyBody.getStatementCount() == 0) {
            final List<CatchNode> catches = newTryNode.getCatches();
            if (catches == null || catches.isEmpty()) {
                // A completely degenerate try block: empty finally, no catches. Replace it with try body.
                return addStatement(new BlockStatement(tryNode.getBody()));
            }
            return addStatement(ensureUnconditionalCatch(newTryNode));
        }
        /*
         * create a new try node
         *    if we have catches:
         *
         *    try            try
         *       x              try
         *    catch               x
         *       y              catch
         *    finally z           y
         *                   catchall
         *                        rethrow
         *
         *   otherwise
         *
         *   try              try
         *      x               x
         *   finally          catchall
         *      y               rethrow
         *
         *
         *   now splice in finally code wherever needed
         *
         */
        final Block catchAll = catchAllBlock(tryNode);
        final List<ThrowNode> rethrows = new ArrayList<>(1);
        catchAll.accept(new SimpleNodeVisitor() {
            @Override
            public boolean enterThrowNode(final ThrowNode throwNode) {
                rethrows.add(throwNode);
                return true;
            }
        });
        assert rethrows.size() == 1;
        if (!tryNode.getCatchBlocks().isEmpty()) {
            final Block outerBody = new Block(newTryNode.getToken(), newTryNode.getFinish(), ensureUnconditionalCatch(newTryNode));
            newTryNode = newTryNode.setBody(lc, outerBody).setCatchBlocks(lc, null);
        }
        newTryNode = newTryNode.setCatchBlocks(lc, Arrays.asList(catchAll));
        /*
         * Now that the transform is done, we have to go into the try and splice
         * the finally block in front of any statement that is outside the try
         */
        return (TryNode)lc.replace(tryNode, spliceFinally(newTryNode, rethrows.get(0), finallyBody));
    }
    private TryNode ensureUnconditionalCatch(final TryNode tryNode) {
        final List<CatchNode> catches = tryNode.getCatches();
        if(catches == null || catches.isEmpty() || catches.get(catches.size() - 1).getExceptionCondition() == null) {
            return tryNode;
        }
        // If the last catch block is conditional, add an unconditional rethrow block
        final List<Block> newCatchBlocks = new ArrayList<>(tryNode.getCatchBlocks());
        newCatchBlocks.add(catchAllBlock(tryNode));
        return tryNode.setCatchBlocks(lc, newCatchBlocks);
    }
    @Override
    public boolean enterUnaryNode(final UnaryNode unaryNode) {
        // ES6 yield and spread are not implemented.
        if (es6) {
            if (unaryNode.isTokenType(TokenType.YIELD) ||
                unaryNode.isTokenType(TokenType.YIELD_STAR)) {
                throwNotImplementedYet("es6.yield", unaryNode);
            } else if (unaryNode.isTokenType(TokenType.SPREAD_ARGUMENT) ||
                unaryNode.isTokenType(TokenType.SPREAD_ARRAY)) {
                throwNotImplementedYet("es6.spread", unaryNode);
            }
        }
        return super.enterUnaryNode(unaryNode);
    }
    @Override
    public boolean enterASSIGN(BinaryNode binaryNode) {
        // ES6 destructuring assignment targets are not implemented.
        if (es6 && (binaryNode.lhs() instanceof ObjectNode || binaryNode.lhs() instanceof ArrayLiteralNode)) {
            throwNotImplementedYet("es6.destructuring", binaryNode);
        }
        return super.enterASSIGN(binaryNode);
    }
    @Override
    public Node leaveVarNode(final VarNode varNode) {
        addStatement(varNode);
        // If this is the last function declaration of a program and the function is anonymous,
        // also emit an ExpressionStatement loading its name; visiting that statement (see
        // leaveExpressionStatement) assigns the value to :return at program level.
        if (varNode.getFlag(VarNode.IS_LAST_FUNCTION_DECLARATION)
                && lc.getCurrentFunction().isProgram()
                && ((FunctionNode) varNode.getInit()).isAnonymous()) {
            new ExpressionStatement(varNode.getLineNumber(), varNode.getToken(), varNode.getFinish(), new IdentNode(varNode.getName())).accept(this);
        }
        return varNode;
    }
    @Override
    public Node leaveWhileNode(final WhileNode whileNode) {
        final Expression test = whileNode.getTest();
        final Block body = whileNode.getBody();
        if (isAlwaysTrue(test)) {
            //turn it into a for node without a test.
            final ForNode forNode = (ForNode)new ForNode(whileNode.getLineNumber(), whileNode.getToken(), whileNode.getFinish(), body, 0).accept(this);
            lc.replace(whileNode, forNode);
            return forNode;
        }
        return addStatement(checkEscape(whileNode));
    }
    @Override
    public Node leaveWithNode(final WithNode withNode) {
        return addStatement(withNode);
    }
    @Override
    public boolean enterClassNode(final ClassNode classNode) {
        // ES6 classes are not implemented.
        throwNotImplementedYet("es6.class", classNode);
        return super.enterClassNode(classNode);
    }
    /**
     * Given a function node that is a callee in a CallNode, replace it with
     * the appropriate marker function. This is used by {@link CodeGenerator}
     * for fast scope calls
     *
     * @param function function called by a CallNode
     * @return transformed node to marker function or identity if not ident/access/indexnode
     */
    private static Expression markerFunction(final Expression function) {
        if (function instanceof IdentNode) {
            return ((IdentNode)function).setIsFunction();
        } else if (function instanceof BaseNode) {
            return ((BaseNode)function).setIsFunction();
        }
        return function;
    }
    /**
     * Calculate a synthetic eval location for a node for the stacktrace, for example src#17&lt;eval&gt;
     * @param node a node
     * @return eval location
     */
    private String evalLocation(final IdentNode node) {
        final Source source = lc.getCurrentFunction().getSource();
        final int pos = node.position();
        return source.getName() + '#' + source.getLine(pos) + ':' + source.getColumn(pos)
                + "<eval>";
    }
    /**
     * Check whether a call node may be a call to eval. In that case we
     * clone the args in order to create the following construct in
     * {@link CodeGenerator}
     *
     * <pre>
     * if (calledFunction == builtInEval) {
     *    eval(cloned arg);
     * } else {
     *    cloned arg;
     * }
     * </pre>
     *
     * @param callNode call node to check if it's an eval
     */
    private CallNode checkEval(final CallNode callNode) {
        if (callNode.getFunction() instanceof IdentNode) {
            final List<Expression> args = callNode.getArgs();
            final IdentNode callee = (IdentNode)callNode.getFunction();
            // 'eval' call with at least one argument
            if (!args.isEmpty() && EVAL.symbolName().equals(callee.getName())) {
                final List<Expression> evalArgs = new ArrayList<>(args.size());
                for(final Expression arg: args) {
                    evalArgs.add((Expression)ensureUniqueNamesIn(arg).accept(this));
                }
                return callNode.setEvalArgs(new CallNode.EvalArgs(evalArgs, evalLocation(callee)));
            }
        }
        return callNode;
    }
    /**
     * Helper that given a loop body makes sure that it is not terminal if it
     * has a continue that leads to the loop header or to outer loops' loop
     * headers. This means that, even if the body ends with a terminal
     * statement, we cannot tag it as terminal
     *
     * @param loopBody the loop body to check
     * @return true if control flow may escape the loop
     */
    private static boolean controlFlowEscapes(final LexicalContext lex, final Block loopBody) {
        final List<Node> escapes = new ArrayList<>();
        loopBody.accept(new SimpleNodeVisitor() {
            @Override
            public Node leaveBreakNode(final BreakNode node) {
                escapes.add(node);
                return node;
            }
            @Override
            public Node leaveContinueNode(final ContinueNode node) {
                // all inner loops have been popped.
                if (lex.contains(node.getTarget(lex))) {
                    escapes.add(node);
                }
                return node;
            }
        });
        return !escapes.isEmpty();
    }
    // If control flow can escape the loop, mark the loop accordingly and make its body non-terminal.
    @SuppressWarnings("unchecked")
    private <T extends LoopNode> T checkEscape(final T loopNode) {
        final boolean escapes = controlFlowEscapes(lc, loopNode.getBody());
        if (escapes) {
            return (T)loopNode.
                setBody(lc, loopNode.getBody().setIsTerminal(lc, false)).
                setControlFlowEscapes(lc, escapes);
        }
        return loopNode;
    }
    // Append a statement to the current block in the lexical context.
    private Node addStatement(final Statement statement) {
        lc.appendStatement(statement);
        return statement;
    }
    // Wrap the statement in its own block (restricting the scope of synthetic symbols)
    // and append it, propagating the statement's terminal state to the new block.
    private void addStatementEnclosedInBlock(final Statement stmt) {
        BlockStatement b = BlockStatement.createReplacement(stmt, Collections.singletonList(stmt));
        if(stmt.isTerminal()) {
            b = b.setBlock(b.getBlock().setIsTerminal(null, true));
        }
        addStatement(b);
    }
    /**
     * An internal expression has a symbol that is tagged internal. Check if
     * this is such a node
     *
     * @param expression expression to check for internal symbol
     * @return true if internal, false otherwise
     */
    private static boolean isInternalExpression(final Expression expression) {
        if (!(expression instanceof IdentNode)) {
            return false;
        }
        final Symbol symbol = ((IdentNode)expression).getSymbol();
        return symbol != null && symbol.isInternal();
    }
    /**
     * Is this an assignment to the special variable that hosts scripting eval
     * results, i.e. __return__?
     *
     * @param expression expression to check whether it is $evalresult = X
     * @return true if an assignment to eval result, false otherwise
     */
    private static boolean isEvalResultAssignment(final Node expression) {
        if (expression instanceof BinaryNode) {
            final Node lhs = ((BinaryNode)expression).lhs();
            if (lhs instanceof IdentNode) {
                return ((IdentNode)lhs).getName().equals(RETURN.symbolName());
            }
        }
        return false;
    }
    // Report an unsupported (not-yet-implemented) language feature, formatting the
    // error message with the source name and the node's line/column position.
    private void throwNotImplementedYet(final String msgId, final Node node) {
        final long token = node.getToken();
        final int line = source.getLine(node.getStart());
        final int column = source.getColumn(node.getStart());
        final String message = ECMAErrors.getMessage("unimplemented." + msgId);
        final String formatted = ErrorManager.format(message, source, line, column, token);
        throw new RuntimeException(formatted);
    }
}
|
googleapis/google-cloud-java | 36,171 | java-vmwareengine/proto-google-cloud-vmwareengine-v1/src/main/java/com/google/cloud/vmwareengine/v1/UpdateSubnetRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vmwareengine/v1/vmwareengine.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.vmwareengine.v1;
/**
*
*
* <pre>
* Request message for
* [VmwareEngine.UpdateSubnet][google.cloud.vmwareengine.v1.VmwareEngine.UpdateSubnet]
* </pre>
*
* Protobuf type {@code google.cloud.vmwareengine.v1.UpdateSubnetRequest}
*/
public final class UpdateSubnetRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vmwareengine.v1.UpdateSubnetRequest)
UpdateSubnetRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateSubnetRequest.newBuilder() to construct.
private UpdateSubnetRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used only for the default instance; all fields remain unset.
private UpdateSubnetRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  // Instantiation hook invoked by the protobuf runtime.
  return new UpdateSubnetRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.vmwareengine.v1.VmwareengineProto
.internal_static_google_cloud_vmwareengine_v1_UpdateSubnetRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vmwareengine.v1.VmwareengineProto
.internal_static_google_cloud_vmwareengine_v1_UpdateSubnetRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vmwareengine.v1.UpdateSubnetRequest.class,
com.google.cloud.vmwareengine.v1.UpdateSubnetRequest.Builder.class);
}
private int bitField0_;
public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* `Subnet` resource by the update.
* The fields specified in the `update_mask` are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* `Subnet` resource by the update.
* The fields specified in the `update_mask` are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* `Subnet` resource by the update.
* The fields specified in the `update_mask` are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int SUBNET_FIELD_NUMBER = 2;
private com.google.cloud.vmwareengine.v1.Subnet subnet_;
/**
*
*
* <pre>
* Required. Subnet description.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Subnet subnet = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the subnet field is set.
*/
@java.lang.Override
public boolean hasSubnet() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Subnet description.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Subnet subnet = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The subnet.
*/
@java.lang.Override
public com.google.cloud.vmwareengine.v1.Subnet getSubnet() {
return subnet_ == null ? com.google.cloud.vmwareengine.v1.Subnet.getDefaultInstance() : subnet_;
}
/**
*
*
* <pre>
* Required. Subnet description.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Subnet subnet = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.vmwareengine.v1.SubnetOrBuilder getSubnetOrBuilder() {
return subnet_ == null ? com.google.cloud.vmwareengine.v1.Subnet.getDefaultInstance() : subnet_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getSubnet());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getSubnet());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.vmwareengine.v1.UpdateSubnetRequest)) {
return super.equals(obj);
}
com.google.cloud.vmwareengine.v1.UpdateSubnetRequest other =
(com.google.cloud.vmwareengine.v1.UpdateSubnetRequest) obj;
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (hasSubnet() != other.hasSubnet()) return false;
if (hasSubnet()) {
if (!getSubnet().equals(other.getSubnet())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
if (hasSubnet()) {
hash = (37 * hash) + SUBNET_FIELD_NUMBER;
hash = (53 * hash) + getSubnet().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.vmwareengine.v1.UpdateSubnetRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vmwareengine.v1.UpdateSubnetRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vmwareengine.v1.UpdateSubnetRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vmwareengine.v1.UpdateSubnetRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vmwareengine.v1.UpdateSubnetRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vmwareengine.v1.UpdateSubnetRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vmwareengine.v1.UpdateSubnetRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vmwareengine.v1.UpdateSubnetRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.vmwareengine.v1.UpdateSubnetRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.vmwareengine.v1.UpdateSubnetRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.vmwareengine.v1.UpdateSubnetRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vmwareengine.v1.UpdateSubnetRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.vmwareengine.v1.UpdateSubnetRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [VmwareEngine.UpdateSubnet][google.cloud.vmwareengine.v1.VmwareEngine.UpdateSubnet]
* </pre>
*
* Protobuf type {@code google.cloud.vmwareengine.v1.UpdateSubnetRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.vmwareengine.v1.UpdateSubnetRequest)
com.google.cloud.vmwareengine.v1.UpdateSubnetRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.vmwareengine.v1.VmwareengineProto
.internal_static_google_cloud_vmwareengine_v1_UpdateSubnetRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vmwareengine.v1.VmwareengineProto
.internal_static_google_cloud_vmwareengine_v1_UpdateSubnetRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vmwareengine.v1.UpdateSubnetRequest.class,
com.google.cloud.vmwareengine.v1.UpdateSubnetRequest.Builder.class);
}
// Construct using com.google.cloud.vmwareengine.v1.UpdateSubnetRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateMaskFieldBuilder();
getSubnetFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
subnet_ = null;
if (subnetBuilder_ != null) {
subnetBuilder_.dispose();
subnetBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.vmwareengine.v1.VmwareengineProto
.internal_static_google_cloud_vmwareengine_v1_UpdateSubnetRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.vmwareengine.v1.UpdateSubnetRequest getDefaultInstanceForType() {
return com.google.cloud.vmwareengine.v1.UpdateSubnetRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.vmwareengine.v1.UpdateSubnetRequest build() {
com.google.cloud.vmwareengine.v1.UpdateSubnetRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.vmwareengine.v1.UpdateSubnetRequest buildPartial() {
com.google.cloud.vmwareengine.v1.UpdateSubnetRequest result =
new com.google.cloud.vmwareengine.v1.UpdateSubnetRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.vmwareengine.v1.UpdateSubnetRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.subnet_ = subnetBuilder_ == null ? subnet_ : subnetBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.vmwareengine.v1.UpdateSubnetRequest) {
return mergeFrom((com.google.cloud.vmwareengine.v1.UpdateSubnetRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.vmwareengine.v1.UpdateSubnetRequest other) {
if (other == com.google.cloud.vmwareengine.v1.UpdateSubnetRequest.getDefaultInstance())
return this;
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.hasSubnet()) {
mergeSubnet(other.getSubnet());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getSubnetFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* `Subnet` resource by the update.
* The fields specified in the `update_mask` are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* `Subnet` resource by the update.
* The fields specified in the `update_mask` are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* `Subnet` resource by the update.
* The fields specified in the `update_mask` are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* `Subnet` resource by the update.
* The fields specified in the `update_mask` are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* `Subnet` resource by the update.
* The fields specified in the `update_mask` are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* `Subnet` resource by the update.
* The fields specified in the `update_mask` are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000001);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* `Subnet` resource by the update.
* The fields specified in the `update_mask` are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* `Subnet` resource by the update.
* The fields specified in the `update_mask` are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* `Subnet` resource by the update.
* The fields specified in the `update_mask` are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private com.google.cloud.vmwareengine.v1.Subnet subnet_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vmwareengine.v1.Subnet,
com.google.cloud.vmwareengine.v1.Subnet.Builder,
com.google.cloud.vmwareengine.v1.SubnetOrBuilder>
subnetBuilder_;
/**
*
*
* <pre>
* Required. Subnet description.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Subnet subnet = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the subnet field is set.
*/
public boolean hasSubnet() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Subnet description.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Subnet subnet = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The subnet.
*/
public com.google.cloud.vmwareengine.v1.Subnet getSubnet() {
if (subnetBuilder_ == null) {
return subnet_ == null
? com.google.cloud.vmwareengine.v1.Subnet.getDefaultInstance()
: subnet_;
} else {
return subnetBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Subnet description.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Subnet subnet = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setSubnet(com.google.cloud.vmwareengine.v1.Subnet value) {
if (subnetBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
subnet_ = value;
} else {
subnetBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Subnet description.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Subnet subnet = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setSubnet(com.google.cloud.vmwareengine.v1.Subnet.Builder builderForValue) {
if (subnetBuilder_ == null) {
subnet_ = builderForValue.build();
} else {
subnetBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Subnet description.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Subnet subnet = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeSubnet(com.google.cloud.vmwareengine.v1.Subnet value) {
if (subnetBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& subnet_ != null
&& subnet_ != com.google.cloud.vmwareengine.v1.Subnet.getDefaultInstance()) {
getSubnetBuilder().mergeFrom(value);
} else {
subnet_ = value;
}
} else {
subnetBuilder_.mergeFrom(value);
}
if (subnet_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Subnet description.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Subnet subnet = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearSubnet() {
bitField0_ = (bitField0_ & ~0x00000002);
subnet_ = null;
if (subnetBuilder_ != null) {
subnetBuilder_.dispose();
subnetBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Subnet description.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Subnet subnet = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.vmwareengine.v1.Subnet.Builder getSubnetBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getSubnetFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Subnet description.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Subnet subnet = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.vmwareengine.v1.SubnetOrBuilder getSubnetOrBuilder() {
if (subnetBuilder_ != null) {
return subnetBuilder_.getMessageOrBuilder();
} else {
return subnet_ == null
? com.google.cloud.vmwareengine.v1.Subnet.getDefaultInstance()
: subnet_;
}
}
/**
*
*
* <pre>
* Required. Subnet description.
* </pre>
*
* <code>
* .google.cloud.vmwareengine.v1.Subnet subnet = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vmwareengine.v1.Subnet,
com.google.cloud.vmwareengine.v1.Subnet.Builder,
com.google.cloud.vmwareengine.v1.SubnetOrBuilder>
getSubnetFieldBuilder() {
if (subnetBuilder_ == null) {
subnetBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vmwareengine.v1.Subnet,
com.google.cloud.vmwareengine.v1.Subnet.Builder,
com.google.cloud.vmwareengine.v1.SubnetOrBuilder>(
getSubnet(), getParentForChildren(), isClean());
subnet_ = null;
}
return subnetBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.vmwareengine.v1.UpdateSubnetRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.vmwareengine.v1.UpdateSubnetRequest)
private static final com.google.cloud.vmwareengine.v1.UpdateSubnetRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.vmwareengine.v1.UpdateSubnetRequest();
}
public static com.google.cloud.vmwareengine.v1.UpdateSubnetRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateSubnetRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateSubnetRequest>() {
@java.lang.Override
public UpdateSubnetRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateSubnetRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateSubnetRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.vmwareengine.v1.UpdateSubnetRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
google/j2objc | 36,430 | jre_emul/android/platform/libcore/ojluni/src/main/java/java/nio/file/Path.java | /*
* Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.nio.file;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.file.spi.FileSystemProvider;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
* An object that may be used to locate a file in a file system. It will
* typically represent a system dependent file path.
*
* <p> A {@code Path} represents a path that is hierarchical and composed of a
* sequence of directory and file name elements separated by a special separator
* or delimiter. A <em>root component</em>, that identifies a file system
* hierarchy, may also be present. The name element that is <em>farthest</em>
* from the root of the directory hierarchy is the name of a file or directory.
* The other name elements are directory names. A {@code Path} can represent a
* root, a root and a sequence of names, or simply one or more name elements.
* A {@code Path} is considered to be an <i>empty path</i> if it consists
* solely of one name element that is empty. Accessing a file using an
* <i>empty path</i> is equivalent to accessing the default directory of the
* file system. {@code Path} defines the {@link #getFileName() getFileName},
* {@link #getParent getParent}, {@link #getRoot getRoot}, and {@link #subpath
* subpath} methods to access the path components or a subsequence of its name
* elements.
*
* <p> In addition to accessing the components of a path, a {@code Path} also
* defines the {@link #resolve(Path) resolve} and {@link #resolveSibling(Path)
* resolveSibling} methods to combine paths. The {@link #relativize relativize}
* method that can be used to construct a relative path between two paths.
* Paths can be {@link #compareTo compared}, and tested against each other using
* the {@link #startsWith startsWith} and {@link #endsWith endsWith} methods.
*
* <p> This interface extends {@link Watchable} interface so that a directory
* located by a path can be {@link #register registered} with a {@link
* WatchService} and entries in the directory watched. </p>
*
* <p> <b>WARNING:</b> This interface is only intended to be implemented by
* those developing custom file system implementations. Methods may be added to
* this interface in future releases. </p>
*
* <h2>Accessing Files</h2>
* <p> Paths may be used with the {@link Files} class to operate on files,
* directories, and other types of files. For example, suppose we want a {@link
* java.io.BufferedReader} to read text from a file "{@code access.log}". The
* file is located in a directory "{@code logs}" relative to the current working
* directory and is UTF-8 encoded.
* <pre>
* Path path = FileSystems.getDefault().getPath("logs", "access.log");
* BufferedReader reader = Files.newBufferedReader(path, StandardCharsets.UTF_8);
* </pre>
*
* <a id="interop"></a><h2>Interoperability</h2>
* <p> Paths associated with the default {@link
* java.nio.file.spi.FileSystemProvider provider} are generally interoperable
* with the {@link java.io.File java.io.File} class. Paths created by other
* providers are unlikely to be interoperable with the abstract path names
* represented by {@code java.io.File}. The {@link java.io.File#toPath toPath}
* method may be used to obtain a {@code Path} from the abstract path name
* represented by a {@code java.io.File} object. The resulting {@code Path} can
* be used to operate on the same file as the {@code java.io.File} object. In
* addition, the {@link #toFile toFile} method is useful to construct a {@code
* File} from the {@code String} representation of a {@code Path}.
*
* <h2>Concurrency</h2>
* <p> Implementations of this interface are immutable and safe for use by
* multiple concurrent threads.
*
* @since 1.7
*/
public interface Path
extends Comparable<Path>, Iterable<Path>, Watchable
{
/**
* Returns the file system that created this object.
*
* @return the file system that created this object
*/
FileSystem getFileSystem();
/**
* Tells whether or not this path is absolute.
*
* <p> An absolute path is complete in that it doesn't need to be combined
* with other path information in order to locate a file.
*
* @return {@code true} if, and only if, this path is absolute
*/
boolean isAbsolute();
/**
* Returns the root component of this path as a {@code Path} object,
* or {@code null} if this path does not have a root component.
*
* @return a path representing the root component of this path,
* or {@code null}
*/
Path getRoot();
/**
* Returns the name of the file or directory denoted by this path as a
* {@code Path} object. The file name is the <em>farthest</em> element from
* the root in the directory hierarchy.
*
* @return a path representing the name of the file or directory, or
* {@code null} if this path has zero elements
*/
Path getFileName();
/**
* Returns the <em>parent path</em>, or {@code null} if this path does not
* have a parent.
*
* <p> The parent of this path object consists of this path's root
* component, if any, and each element in the path except for the
* <em>farthest</em> from the root in the directory hierarchy. This method
* does not access the file system; the path or its parent may not exist.
* Furthermore, this method does not eliminate special names such as "."
* and ".." that may be used in some implementations. On UNIX for example,
* the parent of "{@code /a/b/c}" is "{@code /a/b}", and the parent of
* {@code "x/y/.}" is "{@code x/y}". This method may be used with the {@link
* #normalize normalize} method, to eliminate redundant names, for cases where
* <em>shell-like</em> navigation is required.
*
* <p> If this path has one or more elements, and no root component, then
* this method is equivalent to evaluating the expression:
* <blockquote><pre>
* subpath(0, getNameCount()-1);
* </pre></blockquote>
*
* @return a path representing the path's parent
*/
Path getParent();
/**
* Returns the number of name elements in the path.
*
* @return the number of elements in the path, or {@code 0} if this path
* only represents a root component
*/
int getNameCount();
/**
* Returns a name element of this path as a {@code Path} object.
*
* <p> The {@code index} parameter is the index of the name element to return.
* The element that is <em>closest</em> to the root in the directory hierarchy
* has index {@code 0}. The element that is <em>farthest</em> from the root
* has index {@link #getNameCount count}{@code -1}.
*
* @param index
* the index of the element
*
* @return the name element
*
* @throws IllegalArgumentException
* if {@code index} is negative, {@code index} is greater than or
* equal to the number of elements, or this path has zero name
* elements
*/
Path getName(int index);
/**
* Returns a relative {@code Path} that is a subsequence of the name
* elements of this path.
*
* <p> The {@code beginIndex} and {@code endIndex} parameters specify the
* subsequence of name elements. The name that is <em>closest</em> to the root
* in the directory hierarchy has index {@code 0}. The name that is
* <em>farthest</em> from the root has index {@link #getNameCount
* count}{@code -1}. The returned {@code Path} object has the name elements
* that begin at {@code beginIndex} and extend to the element at index {@code
* endIndex-1}.
*
* @param beginIndex
* the index of the first element, inclusive
* @param endIndex
* the index of the last element, exclusive
*
* @return a new {@code Path} object that is a subsequence of the name
* elements in this {@code Path}
*
* @throws IllegalArgumentException
* if {@code beginIndex} is negative, or greater than or equal to
* the number of elements. If {@code endIndex} is less than or
* equal to {@code beginIndex}, or larger than the number of elements.
*/
Path subpath(int beginIndex, int endIndex);
/**
* Tests if this path starts with the given path.
*
* <p> This path <em>starts</em> with the given path if this path's root
* component <em>starts</em> with the root component of the given path,
* and this path starts with the same name elements as the given path.
* If the given path has more name elements than this path then {@code false}
* is returned.
*
* <p> Whether or not the root component of this path starts with the root
* component of the given path is file system specific. If this path does
* not have a root component and the given path has a root component then
* this path does not start with the given path.
*
* <p> If the given path is associated with a different {@code FileSystem}
* to this path then {@code false} is returned.
*
* @param other
* the given path
*
* @return {@code true} if this path starts with the given path; otherwise
* {@code false}
*/
boolean startsWith(Path other);
/**
* Tests if this path starts with a {@code Path}, constructed by converting
* the given path string, in exactly the manner specified by the {@link
* #startsWith(Path) startsWith(Path)} method. On UNIX for example, the path
* "{@code foo/bar}" starts with "{@code foo}" and "{@code foo/bar}". It
* does not start with "{@code f}" or "{@code fo}".
*
* @param other
* the given path string
*
* @return {@code true} if this path starts with the given path; otherwise
* {@code false}
*
* @throws InvalidPathException
* If the path string cannot be converted to a Path.
*/
boolean startsWith(String other);
/**
* Tests if this path ends with the given path.
*
* <p> If the given path has <em>N</em> elements, and no root component,
* and this path has <em>N</em> or more elements, then this path ends with
* the given path if the last <em>N</em> elements of each path, starting at
* the element farthest from the root, are equal.
*
* <p> If the given path has a root component then this path ends with the
* given path if the root component of this path <em>ends with</em> the root
* component of the given path, and the corresponding elements of both paths
* are equal. Whether or not the root component of this path ends with the
* root component of the given path is file system specific. If this path
* does not have a root component and the given path has a root component
* then this path does not end with the given path.
*
* <p> If the given path is associated with a different {@code FileSystem}
* to this path then {@code false} is returned.
*
* @param other
* the given path
*
* @return {@code true} if this path ends with the given path; otherwise
* {@code false}
*/
boolean endsWith(Path other);
/**
* Tests if this path ends with a {@code Path}, constructed by converting
* the given path string, in exactly the manner specified by the {@link
* #endsWith(Path) endsWith(Path)} method. On UNIX for example, the path
* "{@code foo/bar}" ends with "{@code foo/bar}" and "{@code bar}". It does
* not end with "{@code r}" or "{@code /bar}". Note that trailing separators
* are not taken into account, and so invoking this method on the {@code
* Path}"{@code foo/bar}" with the {@code String} "{@code bar/}" returns
* {@code true}.
*
* @param other
* the given path string
*
* @return {@code true} if this path ends with the given path; otherwise
* {@code false}
*
* @throws InvalidPathException
* If the path string cannot be converted to a Path.
*/
boolean endsWith(String other);
/**
* Returns a path that is this path with redundant name elements eliminated.
*
* <p> The precise definition of this method is implementation dependent but
* in general it derives from this path, a path that does not contain
* <em>redundant</em> name elements. In many file systems, the "{@code .}"
* and "{@code ..}" are special names used to indicate the current directory
* and parent directory. In such file systems all occurrences of "{@code .}"
* are considered redundant. If a "{@code ..}" is preceded by a
* non-"{@code ..}" name then both names are considered redundant (the
* process to identify such names is repeated until it is no longer
* applicable).
*
* <p> This method does not access the file system; the path may not locate
* a file that exists. Eliminating "{@code ..}" and a preceding name from a
* path may result in the path that locates a different file than the original
* path. This can arise when the preceding name is a symbolic link.
*
* @return the resulting path or this path if it does not contain
* redundant name elements; an empty path is returned if this path
* does have a root component and all name elements are redundant
*
* @see #getParent
* @see #toRealPath
*/
Path normalize();
// -- resolution and relativization --
/**
* Resolve the given path against this path.
*
* <p> If the {@code other} parameter is an {@link #isAbsolute() absolute}
* path then this method trivially returns {@code other}. If {@code other}
* is an <i>empty path</i> then this method trivially returns this path.
* Otherwise this method considers this path to be a directory and resolves
* the given path against this path. In the simplest case, the given path
* does not have a {@link #getRoot root} component, in which case this method
* <em>joins</em> the given path to this path and returns a resulting path
* that {@link #endsWith ends} with the given path. Where the given path has
* a root component then resolution is highly implementation dependent and
* therefore unspecified.
*
* @param other
* the path to resolve against this path
*
* @return the resulting path
*
* @see #relativize
*/
Path resolve(Path other);
/**
* Converts a given path string to a {@code Path} and resolves it against
* this {@code Path} in exactly the manner specified by the {@link
* #resolve(Path) resolve} method. For example, suppose that the name
* separator is "{@code /}" and a path represents "{@code foo/bar}", then
* invoking this method with the path string "{@code gus}" will result in
* the {@code Path} "{@code foo/bar/gus}".
*
* @param other
* the path string to resolve against this path
*
* @return the resulting path
*
* @throws InvalidPathException
* if the path string cannot be converted to a Path.
*
* @see FileSystem#getPath
*/
Path resolve(String other);
/**
* Resolves the given path against this path's {@link #getParent parent}
* path. This is useful where a file name needs to be <i>replaced</i> with
* another file name. For example, suppose that the name separator is
* "{@code /}" and a path represents "{@code dir1/dir2/foo}", then invoking
* this method with the {@code Path} "{@code bar}" will result in the {@code
* Path} "{@code dir1/dir2/bar}". If this path does not have a parent path,
* or {@code other} is {@link #isAbsolute() absolute}, then this method
* returns {@code other}. If {@code other} is an empty path then this method
* returns this path's parent, or where this path doesn't have a parent, the
* empty path.
*
* @param other
* the path to resolve against this path's parent
*
* @return the resulting path
*
* @see #resolve(Path)
*/
Path resolveSibling(Path other);
/**
* Converts a given path string to a {@code Path} and resolves it against
* this path's {@link #getParent parent} path in exactly the manner
* specified by the {@link #resolveSibling(Path) resolveSibling} method.
*
* @param other
* the path string to resolve against this path's parent
*
* @return the resulting path
*
* @throws InvalidPathException
* if the path string cannot be converted to a Path.
*
* @see FileSystem#getPath
*/
Path resolveSibling(String other);
/**
* Constructs a relative path between this path and a given path.
*
* <p> Relativization is the inverse of {@link #resolve(Path) resolution}.
* This method attempts to construct a {@link #isAbsolute relative} path
* that when {@link #resolve(Path) resolved} against this path, yields a
* path that locates the same file as the given path. For example, on UNIX,
* if this path is {@code "/a/b"} and the given path is {@code "/a/b/c/d"}
* then the resulting relative path would be {@code "c/d"}. Where this
* path and the given path do not have a {@link #getRoot root} component,
* then a relative path can be constructed. A relative path cannot be
* constructed if only one of the paths have a root component. Where both
* paths have a root component then it is implementation dependent if a
* relative path can be constructed. If this path and the given path are
* {@link #equals equal} then an <i>empty path</i> is returned.
*
* <p> For any two {@link #normalize normalized} paths <i>p</i> and
* <i>q</i>, where <i>q</i> does not have a root component,
* <blockquote>
* <i>p</i><tt>.relativize(</tt><i>p</i><tt>.resolve(</tt><i>q</i><tt>)).equals(</tt><i>q</i><tt>)</tt>
* </blockquote>
*
* <p> When symbolic links are supported, then whether the resulting path,
* when resolved against this path, yields a path that can be used to locate
* the {@link Files#isSameFile same} file as {@code other} is implementation
* dependent. For example, if this path is {@code "/a/b"} and the given
* path is {@code "/a/x"} then the resulting relative path may be {@code
* "../x"}. If {@code "b"} is a symbolic link then is implementation
* dependent if {@code "a/b/../x"} would locate the same file as {@code "/a/x"}.
*
* @param other
* the path to relativize against this path
*
* @return the resulting relative path, or an empty path if both paths are
* equal
*
* @throws IllegalArgumentException
* if {@code other} is not a {@code Path} that can be relativized
* against this path
*/
Path relativize(Path other);
/**
* Returns a URI to represent this path.
*
* <p> This method constructs an absolute {@link URI} with a {@link
* URI#getScheme() scheme} equal to the URI scheme that identifies the
* provider. The exact form of the scheme specific part is highly provider
* dependent.
*
* <p> In the case of the default provider, the URI is hierarchical with
* a {@link URI#getPath() path} component that is absolute. The query and
* fragment components are undefined. Whether the authority component is
* defined or not is implementation dependent. There is no guarantee that
* the {@code URI} may be used to construct a {@link java.io.File java.io.File}.
* In particular, if this path represents a Universal Naming Convention (UNC)
* path, then the UNC server name may be encoded in the authority component
* of the resulting URI. In the case of the default provider, and the file
* exists, and it can be determined that the file is a directory, then the
* resulting {@code URI} will end with a slash.
*
* <p> The default provider provides a similar <em>round-trip</em> guarantee
* to the {@link java.io.File} class. For a given {@code Path} <i>p</i> it
* is guaranteed that
* <blockquote><tt>
* {@link Paths#get(URI) Paths.get}(</tt><i>p</i><tt>.toUri()).equals(</tt><i>p</i>
* <tt>.{@link #toAbsolutePath() toAbsolutePath}())</tt>
* </blockquote>
* so long as the original {@code Path}, the {@code URI}, and the new {@code
* Path} are all created in (possibly different invocations of) the same
* Java virtual machine. Whether other providers make any guarantees is
* provider specific and therefore unspecified.
*
* <p> When a file system is constructed to access the contents of a file
* as a file system then it is highly implementation specific if the returned
* URI represents the given path in the file system or it represents a
* <em>compound</em> URI that encodes the URI of the enclosing file system.
* A format for compound URIs is not defined in this release; such a scheme
* may be added in a future release.
*
* @return the URI representing this path
*
* @throws java.io.IOError
* if an I/O error occurs obtaining the absolute path, or where a
* file system is constructed to access the contents of a file as
* a file system, and the URI of the enclosing file system cannot be
* obtained
*
* @throws SecurityException
* In the case of the default provider, and a security manager
* is installed, the {@link #toAbsolutePath toAbsolutePath} method
* throws a security exception.
*/
URI toUri();
/**
* Returns a {@code Path} object representing the absolute path of this
* path.
*
* <p> If this path is already {@link Path#isAbsolute absolute} then this
* method simply returns this path. Otherwise, this method resolves the path
* in an implementation dependent manner, typically by resolving the path
* against a file system default directory. Depending on the implementation,
* this method may throw an I/O error if the file system is not accessible.
*
* @return a {@code Path} object representing the absolute path
*
* @throws java.io.IOError
* if an I/O error occurs
* @throws SecurityException
* In the case of the default provider, a security manager
* is installed, and this path is not absolute, then the security
* manager's {@link SecurityManager#checkPropertyAccess(String)
* checkPropertyAccess} method is invoked to check access to the
* system property {@code user.dir}
*/
Path toAbsolutePath();
/**
* Returns the <em>real</em> path of an existing file.
*
* <p> The precise definition of this method is implementation dependent but
* in general it derives from this path, an {@link #isAbsolute absolute}
* path that locates the {@link Files#isSameFile same} file as this path, but
* with name elements that represent the actual name of the directories
* and the file. For example, where filename comparisons on a file system
* are case insensitive then the name elements represent the names in their
* actual case. Additionally, the resulting path has redundant name
* elements removed.
*
* <p> If this path is relative then its absolute path is first obtained,
* as if by invoking the {@link #toAbsolutePath toAbsolutePath} method.
*
* <p> The {@code options} array may be used to indicate how symbolic links
* are handled. By default, symbolic links are resolved to their final
* target. If the option {@link LinkOption#NOFOLLOW_LINKS NOFOLLOW_LINKS} is
* present then this method does not resolve symbolic links.
*
* Some implementations allow special names such as "{@code ..}" to refer to
* the parent directory. When deriving the <em>real path</em>, and a
* "{@code ..}" (or equivalent) is preceded by a non-"{@code ..}" name then
* an implementation will typically cause both names to be removed. When
* not resolving symbolic links and the preceding name is a symbolic link
* then the names are only removed if it guaranteed that the resulting path
* will locate the same file as this path.
*
* @param options
* options indicating how symbolic links are handled
*
* @return an absolute path represent the <em>real</em> path of the file
* located by this object
*
* @throws IOException
* if the file does not exist or an I/O error occurs
* @throws SecurityException
* In the case of the default provider, and a security manager
* is installed, its {@link SecurityManager#checkRead(String) checkRead}
* method is invoked to check read access to the file, and where
* this path is not absolute, its {@link SecurityManager#checkPropertyAccess(String)
* checkPropertyAccess} method is invoked to check access to the
* system property {@code user.dir}
*/
Path toRealPath(LinkOption... options) throws IOException;
/**
* Returns a {@link File} object representing this path. Where this {@code
* Path} is associated with the default provider, then this method is
* equivalent to returning a {@code File} object constructed with the
* {@code String} representation of this path.
*
* <p> If this path was created by invoking the {@code File} {@link
* File#toPath toPath} method then there is no guarantee that the {@code
* File} object returned by this method is {@link #equals equal} to the
* original {@code File}.
*
* @return a {@code File} object representing this path
*
* @throws UnsupportedOperationException
* if this {@code Path} is not associated with the default provider
*/
File toFile();
// -- watchable --
/**
* Registers the file located by this path with a watch service.
*
* <p> In this release, this path locates a directory that exists. The
* directory is registered with the watch service so that entries in the
* directory can be watched. The {@code events} parameter is the events to
* register and may contain the following events:
* <ul>
* <li>{@link StandardWatchEventKinds#ENTRY_CREATE ENTRY_CREATE} -
* entry created or moved into the directory</li>
* <li>{@link StandardWatchEventKinds#ENTRY_DELETE ENTRY_DELETE} -
* entry deleted or moved out of the directory</li>
* <li>{@link StandardWatchEventKinds#ENTRY_MODIFY ENTRY_MODIFY} -
* entry in directory was modified</li>
* </ul>
*
* <p> The {@link WatchEvent#context context} for these events is the
* relative path between the directory located by this path, and the path
* that locates the directory entry that is created, deleted, or modified.
*
* <p> The set of events may include additional implementation specific
* event that are not defined by the enum {@link StandardWatchEventKinds}
*
* <p> The {@code modifiers} parameter specifies <em>modifiers</em> that
* qualify how the directory is registered. This release does not define any
* <em>standard</em> modifiers. It may contain implementation specific
* modifiers.
*
* <p> Where a file is registered with a watch service by means of a symbolic
* link then it is implementation specific if the watch continues to depend
* on the existence of the symbolic link after it is registered.
*
* @param watcher
* the watch service to which this object is to be registered
* @param events
* the events for which this object should be registered
* @param modifiers
* the modifiers, if any, that modify how the object is registered
*
* @return a key representing the registration of this object with the
* given watch service
*
* @throws UnsupportedOperationException
* if unsupported events or modifiers are specified
* @throws IllegalArgumentException
* if an invalid combination of events or modifiers is specified
* @throws ClosedWatchServiceException
* if the watch service is closed
* @throws NotDirectoryException
* if the file is registered to watch the entries in a directory
* and the file is not a directory <i>(optional specific exception)</i>
* @throws IOException
* if an I/O error occurs
* @throws SecurityException
* In the case of the default provider, and a security manager is
* installed, the {@link SecurityManager#checkRead(String) checkRead}
* method is invoked to check read access to the file.
*/
@Override
WatchKey register(WatchService watcher,
WatchEvent.Kind<?>[] events,
WatchEvent.Modifier... modifiers)
throws IOException;
/**
* Registers the file located by this path with a watch service.
*
* <p> An invocation of this method behaves in exactly the same way as the
* invocation
* <pre>
* watchable.{@link #register(WatchService,WatchEvent.Kind[],WatchEvent.Modifier[]) register}(watcher, events, new WatchEvent.Modifier[0]);
* </pre>
*
* <p> <b>Usage Example:</b>
* Suppose we wish to register a directory for entry create, delete, and modify
* events:
* <pre>
* Path dir = ...
* WatchService watcher = ...
*
* WatchKey key = dir.register(watcher, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY);
* </pre>
* @param watcher
* The watch service to which this object is to be registered
* @param events
* The events for which this object should be registered
*
* @return A key representing the registration of this object with the
* given watch service
*
* @throws UnsupportedOperationException
* If unsupported events are specified
* @throws IllegalArgumentException
* If an invalid combination of events is specified
* @throws ClosedWatchServiceException
* If the watch service is closed
* @throws NotDirectoryException
* If the file is registered to watch the entries in a directory
* and the file is not a directory <i>(optional specific exception)</i>
* @throws IOException
* If an I/O error occurs
* @throws SecurityException
* In the case of the default provider, and a security manager is
* installed, the {@link SecurityManager#checkRead(String) checkRead}
* method is invoked to check read access to the file.
*/
@Override
WatchKey register(WatchService watcher,
WatchEvent.Kind<?>... events)
throws IOException;
// -- Iterable --
/**
* Returns an iterator over the name elements of this path.
*
* <p> The first element returned by the iterator represents the name
* element that is closest to the root in the directory hierarchy, the
* second element is the next closest, and so on. The last element returned
* is the name of the file or directory denoted by this path. The {@link
* #getRoot root} component, if present, is not returned by the iterator.
*
* @return an iterator over the name elements of this path.
*/
@Override
Iterator<Path> iterator();
// -- compareTo/equals/hashCode --
/**
* Compares two abstract paths lexicographically. The ordering defined by
* this method is provider specific, and in the case of the default
* provider, platform specific. This method does not access the file system
* and neither file is required to exist.
*
* <p> This method may not be used to compare paths that are associated
* with different file system providers.
*
* @param other the path compared to this path.
*
* @return zero if the argument is {@link #equals equal} to this path, a
* value less than zero if this path is lexicographically less than
* the argument, or a value greater than zero if this path is
* lexicographically greater than the argument
*
* @throws ClassCastException
* if the paths are associated with different providers
*/
@Override
int compareTo(Path other);
/**
* Tests this path for equality with the given object.
*
* <p> If the given object is not a Path, or is a Path associated with a
* different {@code FileSystem}, then this method returns {@code false}.
*
* <p> Whether or not two path are equal depends on the file system
* implementation. In some cases the paths are compared without regard
* to case, and others are case sensitive. This method does not access the
* file system and the file is not required to exist. Where required, the
* {@link Files#isSameFile isSameFile} method may be used to check if two
* paths locate the same file.
*
* <p> This method satisfies the general contract of the {@link
* java.lang.Object#equals(Object) Object.equals} method. </p>
*
* @param other
* the object to which this object is to be compared
*
* @return {@code true} if, and only if, the given object is a {@code Path}
* that is identical to this {@code Path}
*/
boolean equals(Object other);
/**
* Computes a hash code for this path.
*
* <p> The hash code is based upon the components of the path, and
* satisfies the general contract of the {@link Object#hashCode
* Object.hashCode} method.
*
* @return the hash-code value for this path
*/
int hashCode();
/**
* Returns the string representation of this path.
*
* <p> If this path was created by converting a path string using the
* {@link FileSystem#getPath getPath} method then the path string returned
* by this method may differ from the original String used to create the path.
*
* <p> The returned path string uses the default name {@link
* FileSystem#getSeparator separator} to separate names in the path.
*
* @return the string representation of this path
*/
String toString();
}
|
openjdk/jdk8 | 36,308 | jdk/src/share/classes/javax/swing/plaf/synth/SynthSliderUI.java | /*
* Copyright (c) 2002, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.swing.plaf.synth;
import java.awt.event.*;
import java.awt.Graphics;
import java.awt.Dimension;
import java.awt.FontMetrics;
import java.awt.Rectangle;
import java.awt.Point;
import java.awt.Insets;
import java.beans.*;
import java.util.Dictionary;
import java.util.Enumeration;
import javax.swing.*;
import javax.swing.plaf.*;
import javax.swing.plaf.basic.BasicSliderUI;
import sun.swing.SwingUtilities2;
/**
* Provides the Synth L&F UI delegate for
* {@link JSlider}.
*
* @author Joshua Outwater
* @since 1.7
*/
public class SynthSliderUI extends BasicSliderUI
implements PropertyChangeListener, SynthUI {
private Rectangle valueRect = new Rectangle();
private boolean paintValue;
/**
* When a JSlider is used as a renderer in a JTable, its layout is not
* being recomputed even though the size is changing. Even though there
* is a ComponentListener installed, it is not being notified. As such,
* at times when being asked to paint the layout should first be redone.
* At the end of the layout method we set this lastSize variable, which
* represents the size of the slider the last time it was layed out.
*
* In the paint method we then check to see that this is accurate, that
* the slider has not changed sizes since being last layed out. If necessary
* we recompute the layout.
*/
private Dimension lastSize;
private int trackHeight;
private int trackBorder;
private int thumbWidth;
private int thumbHeight;
private SynthStyle style;
private SynthStyle sliderTrackStyle;
private SynthStyle sliderThumbStyle;
/** Used to determine the color to paint the thumb. */
private transient boolean thumbActive; //happens on rollover, and when pressed
private transient boolean thumbPressed; //happens when mouse was depressed while over thumb
///////////////////////////////////////////////////
// ComponentUI Interface Implementation methods
///////////////////////////////////////////////////
/**
* Creates a new UI object for the given component.
*
* @param c component to create UI object for
* @return the UI object
*/
public static ComponentUI createUI(JComponent c) {
return new SynthSliderUI((JSlider)c);
}
protected SynthSliderUI(JSlider c) {
super(c);
}
    /**
     * {@inheritDoc}
     */
    @Override
    protected void installDefaults(JSlider slider) {
        // All default properties come from the Synth style; fetch them now.
        updateStyle(slider);
    }
    /**
     * Uninstalls default setting. This method is called when a
     * {@code LookAndFeel} is uninstalled.
     */
    protected void uninstallDefaults(JSlider slider) {
        // Release the main style, then the track and thumb subregion styles,
        // disposing each short-lived context after use.
        SynthContext context = getContext(slider, ENABLED);
        style.uninstallDefaults(context);
        context.dispose();
        style = null;
        context = getContext(slider, Region.SLIDER_TRACK, ENABLED);
        sliderTrackStyle.uninstallDefaults(context);
        context.dispose();
        sliderTrackStyle = null;
        context = getContext(slider, Region.SLIDER_THUMB, ENABLED);
        sliderThumbStyle.uninstallDefaults(context);
        context.dispose();
        sliderThumbStyle = null;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    protected void installListeners(JSlider slider) {
        super.installListeners(slider);
        // Listen for property changes so style-affecting updates are noticed.
        slider.addPropertyChangeListener(this);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    protected void uninstallListeners(JSlider slider) {
        slider.removePropertyChangeListener(this);
        super.uninstallListeners(slider);
    }
private void updateStyle(JSlider c) {
SynthContext context = getContext(c, ENABLED);
SynthStyle oldStyle = style;
style = SynthLookAndFeel.updateStyle(context, this);
if (style != oldStyle) {
thumbWidth =
style.getInt(context, "Slider.thumbWidth", 30);
thumbHeight =
style.getInt(context, "Slider.thumbHeight", 14);
// handle scaling for sizeVarients for special case components. The
// key "JComponent.sizeVariant" scales for large/small/mini
// components are based on Apples LAF
String scaleKey = (String)slider.getClientProperty(
"JComponent.sizeVariant");
if (scaleKey != null){
if ("large".equals(scaleKey)){
thumbWidth *= 1.15;
thumbHeight *= 1.15;
} else if ("small".equals(scaleKey)){
thumbWidth *= 0.857;
thumbHeight *= 0.857;
} else if ("mini".equals(scaleKey)){
thumbWidth *= 0.784;
thumbHeight *= 0.784;
}
}
trackBorder =
style.getInt(context, "Slider.trackBorder", 1);
trackHeight = thumbHeight + trackBorder * 2;
paintValue = style.getBoolean(context,
"Slider.paintValue", true);
if (oldStyle != null) {
uninstallKeyboardActions(c);
installKeyboardActions(c);
}
}
context.dispose();
context = getContext(c, Region.SLIDER_TRACK, ENABLED);
sliderTrackStyle =
SynthLookAndFeel.updateStyle(context, this);
context.dispose();
context = getContext(c, Region.SLIDER_THUMB, ENABLED);
sliderThumbStyle =
SynthLookAndFeel.updateStyle(context, this);
context.dispose();
}
    /**
     * {@inheritDoc}
     */
    @Override
    protected TrackListener createTrackListener(JSlider s) {
        return new SynthTrackListener();
    }
    // Recomputes whether the pointer at (x, y) is over the thumb.
    private void updateThumbState(int x, int y) {
        setThumbActive(thumbRect.contains(x, y));
    }
    // Same as above, but also records whether the thumb is pressed.
    private void updateThumbState(int x, int y, boolean pressed) {
        updateThumbState(x, y);
        setThumbPressed(pressed);
    }
    // Repaints the thumb only when the rollover state actually changes.
    private void setThumbActive(boolean active) {
        if (thumbActive != active) {
            thumbActive = active;
            slider.repaint(thumbRect);
        }
    }
    // Repaints the thumb only when the pressed state actually changes.
    private void setThumbPressed(boolean pressed) {
        if (thumbPressed != pressed) {
            thumbPressed = pressed;
            slider.repaint(thumbRect);
        }
    }
    /**
     * {@inheritDoc}
     *
     * Returns the baseline of the first/lowest label when labels are
     * painted and share a common baseline; -1 otherwise.
     */
    @Override
    public int getBaseline(JComponent c, int width, int height) {
        if (c == null) {
            throw new NullPointerException("Component must be non-null");
        }
        if (width < 0 || height < 0) {
            throw new IllegalArgumentException(
                    "Width and height must be >= 0");
        }
        if (slider.getPaintLabels() && labelsHaveSameBaselines()) {
            // Get the insets for the track.
            Insets trackInsets = new Insets(0, 0, 0, 0);
            SynthContext trackContext = getContext(slider,
                                                   Region.SLIDER_TRACK);
            style.getInsets(trackContext, trackInsets);
            trackContext.dispose();
            if (slider.getOrientation() == JSlider.HORIZONTAL) {
                int valueHeight = 0;
                if (paintValue) {
                    SynthContext context = getContext(slider);
                    valueHeight = context.getStyle().getGraphicsUtils(context).
                            getMaximumCharHeight(context);
                    context.dispose();
                }
                int tickHeight = 0;
                if (slider.getPaintTicks()) {
                    tickHeight = getTickLength();
                }
                int labelHeight = getHeightOfTallestLabel();
                // Mirror of the stacking computed in layout(): value, track
                // (with insets), ticks, labels, plus 4px of spacing.
                int contentHeight = valueHeight + trackHeight +
                        trackInsets.top + trackInsets.bottom +
                        tickHeight + labelHeight + 4;
                int centerY = height / 2 - contentHeight / 2;
                centerY += valueHeight + 2;
                centerY += trackHeight + trackInsets.top + trackInsets.bottom;
                centerY += tickHeight + 2;
                // All labels share a baseline, so any label is representative.
                JComponent label = (JComponent) slider.getLabelTable().elements().nextElement();
                Dimension pref = label.getPreferredSize();
                return centerY + label.getBaseline(pref.width, pref.height);
            }
            else { // VERTICAL
                // Use the label nearest the top of the slider.
                Integer value = slider.getInverted() ? getLowestValue() :
                                getHighestValue();
                if (value != null) {
                    int valueY = insetCache.top;
                    int valueHeight = 0;
                    if (paintValue) {
                        SynthContext context = getContext(slider);
                        valueHeight = context.getStyle().getGraphicsUtils(
                                context).getMaximumCharHeight(context);
                        context.dispose();
                    }
                    int contentHeight = height - insetCache.top -
                            insetCache.bottom;
                    int trackY = valueY + valueHeight;
                    int trackHeight = contentHeight - valueHeight;
                    int yPosition = yPositionForValue(value.intValue(), trackY,
                                                      trackHeight);
                    JComponent label = (JComponent) slider.getLabelTable().get(value);
                    Dimension pref = label.getPreferredSize();
                    // Labels are centered on their value's y position.
                    return yPosition - pref.height / 2 +
                            label.getBaseline(pref.width, pref.height);
                }
            }
        }
        return -1;
    }
/**
* {@inheritDoc}
*/
@Override
public Dimension getPreferredSize(JComponent c) {
recalculateIfInsetsChanged();
Dimension d = new Dimension(contentRect.width, contentRect.height);
if (slider.getOrientation() == JSlider.VERTICAL) {
d.height = 200;
} else {
d.width = 200;
}
Insets i = slider.getInsets();
d.width += i.left + i.right;
d.height += i.top + i.bottom;
return d;
}
/**
* {@inheritDoc}
*/
@Override
public Dimension getMinimumSize(JComponent c) {
recalculateIfInsetsChanged();
Dimension d = new Dimension(contentRect.width, contentRect.height);
if (slider.getOrientation() == JSlider.VERTICAL) {
d.height = thumbRect.height + insetCache.top + insetCache.bottom;
} else {
d.width = thumbRect.width + insetCache.left + insetCache.right;
}
return d;
}
    /**
     * {@inheritDoc}
     *
     * Thumb size must be known before layout; thumb location depends on
     * the rectangles layout() produces, hence this fixed ordering.
     */
    @Override
    protected void calculateGeometry() {
        calculateThumbSize();
        layout();
        calculateThumbLocation();
    }
    /**
     * Lays out the slider: computes the value, track, tick and label
     * rectangles for the current orientation, and records the slider size
     * they were computed for in {@code lastSize}.
     */
    protected void layout() {
        SynthContext context = getContext(slider);
        SynthGraphicsUtils synthGraphics = style.getGraphicsUtils(context);
        // Get the insets for the track.
        Insets trackInsets = new Insets(0, 0, 0, 0);
        SynthContext trackContext = getContext(slider, Region.SLIDER_TRACK);
        style.getInsets(trackContext, trackInsets);
        trackContext.dispose();
        if (slider.getOrientation() == JSlider.HORIZONTAL) {
            // Calculate the height of all the subcomponents so we can center
            // them.
            valueRect.height = 0;
            if (paintValue) {
                valueRect.height =
                    synthGraphics.getMaximumCharHeight(context);
            }
            trackRect.height = trackHeight;
            tickRect.height = 0;
            if (slider.getPaintTicks()) {
                tickRect.height = getTickLength();
            }
            labelRect.height = 0;
            if (slider.getPaintLabels()) {
                labelRect.height = getHeightOfTallestLabel();
            }
            contentRect.height = valueRect.height + trackRect.height
                + trackInsets.top + trackInsets.bottom
                + tickRect.height + labelRect.height + 4;
            contentRect.width = slider.getWidth() - insetCache.left
                - insetCache.right;
            // Check if any of the labels will paint out of bounds.
            int pad = 0;
            if (slider.getPaintLabels()) {
                // Calculate the track rectangle. It is necessary for
                // xPositionForValue to return correct values.
                trackRect.x = insetCache.left;
                trackRect.width = contentRect.width;
                Dictionary dictionary = slider.getLabelTable();
                if (dictionary != null) {
                    int minValue = slider.getMinimum();
                    int maxValue = slider.getMaximum();
                    // Iterate through the keys in the dictionary and find the
                    // first and last labels indices that fall within the
                    // slider range.
                    int firstLblIdx = Integer.MAX_VALUE;
                    int lastLblIdx = Integer.MIN_VALUE;
                    for (Enumeration keys = dictionary.keys();
                            keys.hasMoreElements(); ) {
                        int keyInt = ((Integer)keys.nextElement()).intValue();
                        if (keyInt >= minValue && keyInt < firstLblIdx) {
                            firstLblIdx = keyInt;
                        }
                        if (keyInt <= maxValue && keyInt > lastLblIdx) {
                            lastLblIdx = keyInt;
                        }
                    }
                    // Calculate the pad necessary for the labels at the first
                    // and last visible indices.
                    pad = getPadForLabel(firstLblIdx);
                    pad = Math.max(pad, getPadForLabel(lastLblIdx));
                }
            }
            // Calculate the painting rectangles for each of the different
            // slider areas. All share the same horizontal extent, inset by
            // the label padding on both sides.
            valueRect.x = trackRect.x = tickRect.x = labelRect.x =
                (insetCache.left + pad);
            valueRect.width = trackRect.width = tickRect.width =
                labelRect.width = (contentRect.width - (pad * 2));
            // Stack the areas vertically, centered within the slider.
            int centerY = slider.getHeight() / 2 - contentRect.height / 2;
            valueRect.y = centerY;
            centerY += valueRect.height + 2;
            trackRect.y = centerY + trackInsets.top;
            centerY += trackRect.height + trackInsets.top + trackInsets.bottom;
            tickRect.y = centerY;
            centerY += tickRect.height + 2;
            labelRect.y = centerY;
            centerY += labelRect.height;
        } else {
            // Calculate the width of all the subcomponents so we can center
            // them.
            trackRect.width = trackHeight;
            tickRect.width = 0;
            if (slider.getPaintTicks()) {
                tickRect.width = getTickLength();
            }
            labelRect.width = 0;
            if (slider.getPaintLabels()) {
                labelRect.width = getWidthOfWidestLabel();
            }
            valueRect.y = insetCache.top;
            valueRect.height = 0;
            if (paintValue) {
                valueRect.height =
                    synthGraphics.getMaximumCharHeight(context);
            }
            // Get the max width of the min or max value of the slider.
            FontMetrics fm = slider.getFontMetrics(slider.getFont());
            valueRect.width = Math.max(
                synthGraphics.computeStringWidth(context, slider.getFont(),
                        fm, "" + slider.getMaximum()),
                synthGraphics.computeStringWidth(context, slider.getFont(),
                        fm, "" + slider.getMinimum()));
            // l: half the value text; w1/w2: widths left/right of the track
            // center line. The wider of each pair decides the content width.
            int l = valueRect.width / 2;
            int w1 = trackInsets.left + trackRect.width / 2;
            int w2 = trackRect.width / 2 + trackInsets.right +
                tickRect.width + labelRect.width;
            contentRect.width = Math.max(w1, l) + Math.max(w2, l) +
                2 + insetCache.left + insetCache.right;
            contentRect.height = slider.getHeight() -
                insetCache.top - insetCache.bottom;
            // Layout the components.
            trackRect.y = tickRect.y = labelRect.y =
                valueRect.y + valueRect.height;
            trackRect.height = tickRect.height = labelRect.height =
                contentRect.height - valueRect.height;
            int startX = slider.getWidth() / 2 - contentRect.width / 2;
            if (SynthLookAndFeel.isLeftToRight(slider)) {
                if (l > w1) {
                    startX += (l - w1);
                }
                trackRect.x = startX + trackInsets.left;
                startX += trackInsets.left + trackRect.width + trackInsets.right;
                tickRect.x = startX;
                labelRect.x = startX + tickRect.width + 2;
            } else {
                // Right-to-left: labels, then ticks, then the track.
                if (l > w2) {
                    startX += (l - w2);
                }
                labelRect.x = startX;
                startX += labelRect.width + 2;
                tickRect.x = startX;
                trackRect.x = startX + tickRect.width + trackInsets.left;
            }
        }
        context.dispose();
        // Remember the size this layout was computed for; paint() compares
        // against it to detect stale layouts (e.g. slider used as renderer).
        lastSize = slider.getSize();
    }
/**
* Calculates the pad for the label at the specified index.
*
* @param i index of the label to calculate pad for.
* @return padding required to keep label visible.
*/
private int getPadForLabel(int i) {
int pad = 0;
JComponent c = (JComponent) slider.getLabelTable().get(i);
if (c != null) {
int centerX = xPositionForValue(i);
int cHalfWidth = c.getPreferredSize().width / 2;
if (centerX - cHalfWidth < insetCache.left) {
pad = Math.max(pad, insetCache.left - (centerX - cHalfWidth));
}
if (centerX + cHalfWidth > slider.getWidth() - insetCache.right) {
pad = Math.max(pad, (centerX + cHalfWidth) -
(slider.getWidth() - insetCache.right));
}
}
return pad;
}
    /**
     * {@inheritDoc}
     *
     * Shifts the thumb inward by the track border, then refreshes the
     * rollover state since the thumb may have moved under the pointer.
     */
    @Override
    protected void calculateThumbLocation() {
        super.calculateThumbLocation();
        if (slider.getOrientation() == JSlider.HORIZONTAL) {
            thumbRect.y += trackBorder;
        } else {
            thumbRect.x += trackBorder;
        }
        Point mousePosition = slider.getMousePosition();
        if(mousePosition != null) {
            updateThumbState(mousePosition.x, mousePosition.y);
        }
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void setThumbLocation(int x, int y) {
        super.setThumbLocation(x, y);
        // Value rect is tied to the thumb location. We need to repaint when
        // the thumb repaints.
        slider.repaint(valueRect.x, valueRect.y,
                valueRect.width, valueRect.height);
        // Moving the thumb programmatically clears the rollover highlight.
        setThumbActive(false);
    }
/**
* {@inheritDoc}
*/
@Override
protected int xPositionForValue(int value) {
int min = slider.getMinimum();
int max = slider.getMaximum();
int trackLeft = trackRect.x + thumbRect.width / 2 + trackBorder;
int trackRight = trackRect.x + trackRect.width - thumbRect.width / 2
- trackBorder;
int trackLength = trackRight - trackLeft;
double valueRange = (double)max - (double)min;
double pixelsPerValue = (double)trackLength / valueRange;
int xPosition;
if (!drawInverted()) {
xPosition = trackLeft;
xPosition += Math.round( pixelsPerValue * ((double)value - min));
} else {
xPosition = trackRight;
xPosition -= Math.round( pixelsPerValue * ((double)value - min));
}
xPosition = Math.max(trackLeft, xPosition);
xPosition = Math.min(trackRight, xPosition);
return xPosition;
}
/**
* {@inheritDoc}
*/
@Override
protected int yPositionForValue(int value, int trackY, int trackHeight) {
int min = slider.getMinimum();
int max = slider.getMaximum();
int trackTop = trackY + thumbRect.height / 2 + trackBorder;
int trackBottom = trackY + trackHeight - thumbRect.height / 2 -
trackBorder;
int trackLength = trackBottom - trackTop;
double valueRange = (double)max - (double)min;
double pixelsPerValue = (double)trackLength / valueRange;
int yPosition;
if (!drawInverted()) {
yPosition = trackTop;
yPosition += Math.round(pixelsPerValue * ((double)max - value));
} else {
yPosition = trackTop;
yPosition += Math.round(pixelsPerValue * ((double)value - min));
}
yPosition = Math.max(trackTop, yPosition);
yPosition = Math.min(trackBottom, yPosition);
return yPosition;
}
/**
* {@inheritDoc}
*/
@Override
public int valueForYPosition(int yPos) {
int value;
int minValue = slider.getMinimum();
int maxValue = slider.getMaximum();
int trackTop = trackRect.y + thumbRect.height / 2 + trackBorder;
int trackBottom = trackRect.y + trackRect.height
- thumbRect.height / 2 - trackBorder;
int trackLength = trackBottom - trackTop;
if (yPos <= trackTop) {
value = drawInverted() ? minValue : maxValue;
} else if (yPos >= trackBottom) {
value = drawInverted() ? maxValue : minValue;
} else {
int distanceFromTrackTop = yPos - trackTop;
double valueRange = (double)maxValue - (double)minValue;
double valuePerPixel = valueRange / (double)trackLength;
int valueFromTrackTop =
(int)Math.round(distanceFromTrackTop * valuePerPixel);
value = drawInverted() ?
minValue + valueFromTrackTop : maxValue - valueFromTrackTop;
}
return value;
}
/**
* {@inheritDoc}
*/
@Override
public int valueForXPosition(int xPos) {
int value;
int minValue = slider.getMinimum();
int maxValue = slider.getMaximum();
int trackLeft = trackRect.x + thumbRect.width / 2 + trackBorder;
int trackRight = trackRect.x + trackRect.width
- thumbRect.width / 2 - trackBorder;
int trackLength = trackRight - trackLeft;
if (xPos <= trackLeft) {
value = drawInverted() ? maxValue : minValue;
} else if (xPos >= trackRight) {
value = drawInverted() ? minValue : maxValue;
} else {
int distanceFromTrackLeft = xPos - trackLeft;
double valueRange = (double)maxValue - (double)minValue;
double valuePerPixel = valueRange / (double)trackLength;
int valueFromTrackLeft =
(int)Math.round(distanceFromTrackLeft * valuePerPixel);
value = drawInverted() ?
maxValue - valueFromTrackLeft : minValue + valueFromTrackLeft;
}
return value;
}
/**
* {@inheritDoc}
*/
@Override
protected Dimension getThumbSize() {
Dimension size = new Dimension();
if (slider.getOrientation() == JSlider.VERTICAL) {
size.width = thumbHeight;
size.height = thumbWidth;
} else {
size.width = thumbWidth;
size.height = thumbHeight;
}
return size;
}
    /**
     * {@inheritDoc}
     *
     * Combines the style insets with the component insets; geometry is only
     * recomputed when the combined value differs from the cached one.
     */
    @Override
    protected void recalculateIfInsetsChanged() {
        SynthContext context = getContext(slider);
        Insets newInsets = style.getInsets(context, null);
        Insets compInsets = slider.getInsets();
        newInsets.left += compInsets.left; newInsets.right += compInsets.right;
        newInsets.top += compInsets.top; newInsets.bottom += compInsets.bottom;
        if (!newInsets.equals(insetCache)) {
            insetCache = newInsets;
            calculateGeometry();
        }
        context.dispose();
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public SynthContext getContext(JComponent c) {
        return getContext(c, SynthLookAndFeel.getComponentState(c));
    }
    // Context for the main slider region in the given state.
    private SynthContext getContext(JComponent c, int state) {
        return SynthContext.getContext(SynthContext.class, c,
                        SynthLookAndFeel.getRegion(c), style, state);
    }
    // Context for a subregion, deriving the state from the thumb flags.
    private SynthContext getContext(JComponent c, Region subregion) {
        return getContext(c, subregion, getComponentState(c, subregion));
    }
    // Context for a subregion in an explicit state, choosing the matching
    // subregion style (track or thumb).
    private SynthContext getContext(JComponent c, Region subregion, int state) {
        SynthStyle style = null;
        Class klass = SynthContext.class;
        if (subregion == Region.SLIDER_TRACK) {
            style = sliderTrackStyle;
        } else if (subregion == Region.SLIDER_THUMB) {
            style = sliderThumbStyle;
        }
        return SynthContext.getContext(klass, c, subregion, style, state);
    }
private int getComponentState(JComponent c, Region region) {
if (region == Region.SLIDER_THUMB && thumbActive &&c.isEnabled()) {
int state = thumbPressed ? PRESSED : MOUSE_OVER;
if (c.isFocusOwner()) state |= FOCUSED;
return state;
}
return SynthLookAndFeel.getComponentState(c);
}
/**
* Notifies this UI delegate to repaint the specified component.
* This method paints the component background, then calls
* the {@link #paint(SynthContext,Graphics)} method.
*
* <p>In general, this method does not need to be overridden by subclasses.
* All Look and Feel rendering code should reside in the {@code paint} method.
*
* @param g the {@code Graphics} object used for painting
* @param c the component being painted
* @see #paint(SynthContext,Graphics)
*/
@Override
public void update(Graphics g, JComponent c) {
SynthContext context = getContext(c);
SynthLookAndFeel.update(context, g);
context.getPainter().paintSliderBackground(context,
g, 0, 0, c.getWidth(), c.getHeight(),
slider.getOrientation());
paint(context, g);
context.dispose();
}
    /**
     * Paints the specified component according to the Look and Feel.
     * <p>This method is not used by Synth Look and Feel.
     * Painting is handled by the {@link #paint(SynthContext,Graphics)} method.
     *
     * @param g the {@code Graphics} object used for painting
     * @param c the component being painted
     * @see #paint(SynthContext,Graphics)
     */
    @Override
    public void paint(Graphics g, JComponent c) {
        SynthContext context = getContext(c);
        paint(context, g);
        context.dispose();
    }
    /**
     * Paints the specified component: the current value text (if enabled),
     * then the track, thumb, ticks and labels, each only when it intersects
     * the clip.
     *
     * @param context context for the component being painted
     * @param g the {@code Graphics} object used for painting
     * @see #update(Graphics,JComponent)
     */
    protected void paint(SynthContext context, Graphics g) {
        recalculateIfInsetsChanged();
        recalculateIfOrientationChanged();
        Rectangle clip = g.getClipBounds();
        // Re-layout if the slider was resized without a layout pass (can
        // happen when the slider is used as a renderer).
        if (lastSize == null || !lastSize.equals(slider.getSize())) {
            calculateGeometry();
        }
        if (paintValue) {
            FontMetrics fm = SwingUtilities2.getFontMetrics(slider, g);
            int labelWidth = context.getStyle().getGraphicsUtils(context).
                computeStringWidth(context, g.getFont(), fm,
                    "" + slider.getValue());
            // Center the value text over the thumb.
            valueRect.x = thumbRect.x + (thumbRect.width - labelWidth) / 2;
            // For horizontal sliders, make sure value is not painted
            // outside slider bounds.
            if (slider.getOrientation() == JSlider.HORIZONTAL) {
                if (valueRect.x + labelWidth > insetCache.left + contentRect.width) {
                    valueRect.x =  (insetCache.left + contentRect.width) - labelWidth;
                }
                valueRect.x = Math.max(valueRect.x, 0);
            }
            g.setColor(context.getStyle().getColor(
                    context, ColorType.TEXT_FOREGROUND));
            context.getStyle().getGraphicsUtils(context).paintText(
                    context, g, "" + slider.getValue(), valueRect.x,
                    valueRect.y, -1);
        }
        if (slider.getPaintTrack() && clip.intersects(trackRect)) {
            SynthContext subcontext = getContext(slider, Region.SLIDER_TRACK);
            paintTrack(subcontext, g, trackRect);
            subcontext.dispose();
        }
        if (clip.intersects(thumbRect)) {
            SynthContext subcontext = getContext(slider, Region.SLIDER_THUMB);
            paintThumb(subcontext, g, thumbRect);
            subcontext.dispose();
        }
        if (slider.getPaintTicks() && clip.intersects(tickRect)) {
            paintTicks(g);
        }
        if (slider.getPaintLabels() && clip.intersects(labelRect)) {
            paintLabels(g);
        }
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public void paintBorder(SynthContext context, Graphics g, int x,
                            int y, int w, int h) {
        // Delegate to the style's painter; orientation selects the art.
        context.getPainter().paintSliderBorder(context, g, x, y, w, h,
                                               slider.getOrientation());
    }
/**
* Paints the slider thumb.
*
* @param context context for the component being painted
* @param g {@code Graphics} object used for painting
* @param thumbBounds bounding box for the thumb
*/
protected void paintThumb(SynthContext context, Graphics g,
Rectangle thumbBounds) {
int orientation = slider.getOrientation();
SynthLookAndFeel.updateSubregion(context, g, thumbBounds);
context.getPainter().paintSliderThumbBackground(context, g,
thumbBounds.x, thumbBounds.y, thumbBounds.width,
thumbBounds.height, orientation);
context.getPainter().paintSliderThumbBorder(context, g,
thumbBounds.x, thumbBounds.y, thumbBounds.width,
thumbBounds.height, orientation);
}
/**
* Paints the slider track.
*
* @param context context for the component being painted
* @param g {@code Graphics} object used for painting
* @param trackBounds bounding box for the track
*/
protected void paintTrack(SynthContext context, Graphics g,
Rectangle trackBounds) {
int orientation = slider.getOrientation();
SynthLookAndFeel.updateSubregion(context, g, trackBounds);
context.getPainter().paintSliderTrackBackground(context, g,
trackBounds.x, trackBounds.y, trackBounds.width,
trackBounds.height, orientation);
context.getPainter().paintSliderTrackBorder(context, g,
trackBounds.x, trackBounds.y, trackBounds.width,
trackBounds.height, orientation);
}
    /**
     * {@inheritDoc}
     */
    @Override
    public void propertyChange(PropertyChangeEvent e) {
        // Refresh styles when a style-affecting property changes
        // (e.g. ancestor, componentOrientation, name).
        if (SynthLookAndFeel.shouldUpdateStyle(e)) {
            updateStyle((JSlider)e.getSource());
        }
    }
    //////////////////////////////////////////////////
    /// Track Listener Class
    //////////////////////////////////////////////////
    /**
     * Tracks mouse movements to keep the thumb's rollover/pressed state in
     * sync and to drive value changes while dragging.
     */
    private class SynthTrackListener extends TrackListener {
        @Override public void mouseExited(MouseEvent e) {
            setThumbActive(false);
        }
        @Override public void mousePressed(MouseEvent e) {
            super.mousePressed(e);
            setThumbPressed(thumbRect.contains(e.getX(), e.getY()));
        }
        @Override public void mouseReleased(MouseEvent e) {
            super.mouseReleased(e);
            updateThumbState(e.getX(), e.getY(), false);
        }
        @Override public void mouseDragged(MouseEvent e) {
            int thumbMiddle;
            if (!slider.isEnabled()) {
                return;
            }
            currentMouseX = e.getX();
            currentMouseY = e.getY();
            // Only react when a drag of the thumb is in progress.
            if (!isDragging()) {
                return;
            }
            slider.setValueIsAdjusting(true);
            switch (slider.getOrientation()) {
            case JSlider.VERTICAL:
                int halfThumbHeight = thumbRect.height / 2;
                // 'offset' (from TrackListener) is the grab point within
                // the thumb, so the thumb doesn't jump on drag start.
                int thumbTop = e.getY() - offset;
                int trackTop = trackRect.y;
                int trackBottom = trackRect.y + trackRect.height
                        - halfThumbHeight - trackBorder;
                // Limit the draggable range by the slider's extent.
                int vMax = yPositionForValue(slider.getMaximum() -
                        slider.getExtent());
                if (drawInverted()) {
                    trackBottom = vMax;
                    trackTop = trackTop + halfThumbHeight;
                } else {
                    trackTop = vMax;
                }
                thumbTop = Math.max(thumbTop, trackTop - halfThumbHeight);
                thumbTop = Math.min(thumbTop, trackBottom - halfThumbHeight);
                setThumbLocation(thumbRect.x, thumbTop);
                thumbMiddle = thumbTop + halfThumbHeight;
                slider.setValue(valueForYPosition(thumbMiddle));
                break;
            case JSlider.HORIZONTAL:
                int halfThumbWidth = thumbRect.width / 2;
                int thumbLeft = e.getX() - offset;
                int trackLeft = trackRect.x + halfThumbWidth + trackBorder;
                int trackRight = trackRect.x + trackRect.width
                        - halfThumbWidth - trackBorder;
                // Limit the draggable range by the slider's extent.
                int hMax = xPositionForValue(slider.getMaximum() -
                        slider.getExtent());
                if (drawInverted()) {
                    trackLeft = hMax;
                } else {
                    trackRight = hMax;
                }
                thumbLeft = Math.max(thumbLeft, trackLeft - halfThumbWidth);
                thumbLeft = Math.min(thumbLeft, trackRight - halfThumbWidth);
                setThumbLocation(thumbLeft, thumbRect.y);
                thumbMiddle = thumbLeft + halfThumbWidth;
                slider.setValue(valueForXPosition(thumbMiddle));
                break;
            default:
                return;
            }
            if (slider.getValueIsAdjusting()) {
                setThumbActive(true);
            }
        }
        @Override public void mouseMoved(MouseEvent e) {
            updateThumbState(e.getX(), e.getY());
        }
    }
}
|
oracle/graal | 36,178 | espresso/src/com.oracle.truffle.espresso.processor/src/com/oracle/truffle/espresso/processor/SubstitutionProcessor.java | /*
* Copyright (c) 2019, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.truffle.espresso.processor;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.processing.RoundEnvironment;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.Name;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.ArrayType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.tools.Diagnostic;
import com.oracle.truffle.espresso.processor.builders.ClassBuilder;
import com.oracle.truffle.espresso.processor.builders.FieldBuilder;
import com.oracle.truffle.espresso.processor.builders.IndentingStringBuilder;
import com.oracle.truffle.espresso.processor.builders.MethodBuilder;
import com.oracle.truffle.espresso.processor.builders.ModifierBuilder;
import com.oracle.truffle.espresso.processor.builders.StatementBuilder;
public final class SubstitutionProcessor extends EspressoProcessor {
// @EspressoSubstitutions
private TypeElement espressoSubstitutions;
// @Substitution
private TypeElement substitutionAnnotation;
// @InlineInBytecode
private TypeElement inlineInBytecodeAnnotation;
// @JavaType
private TypeElement javaType;
// NoProvider.class
private TypeElement noProvider;
// InlinedMethodPredicate.class
private TypeElement noPredicate;
// region Various String constants.
private static final String SUBSTITUTION_PACKAGE = "com.oracle.truffle.espresso.substitutions";
private static final String ESPRESSO_SUBSTITUTIONS = SUBSTITUTION_PACKAGE + "." + "EspressoSubstitutions";
private static final String SUBSTITUTION = SUBSTITUTION_PACKAGE + "." + "Substitution";
private static final String INLINE_IN_BYTECODE = SUBSTITUTION_PACKAGE + "." + "InlineInBytecode";
private static final String JAVA_TYPE = SUBSTITUTION_PACKAGE + "." + "JavaType";
private static final String NO_PROVIDER = SUBSTITUTION_PACKAGE + "." + "SubstitutionNamesProvider" + "." + "NoProvider";
private static final String SUBSTITUTOR = "JavaSubstitution";
private static final String GET_METHOD_NAME = "getMethodNames";
private static final String SUBSTITUTION_CLASS_NAMES = "substitutionClassNames";
private static final String INSTANCE = "INSTANCE";
private static final String VIRTUAL_FRAME_IMPORT = "com.oracle.truffle.api.frame.VirtualFrame";
private static final String ESPRESSO_FRAME = "EspressoFrame";
private static final String ESPRESSO_FRAME_IMPORT = "com.oracle.truffle.espresso.nodes.EspressoFrame";
private static final String INLINED_FRAME_ACCESS = "InlinedFrameAccess";
private static final String INLINED_FRAME_ACCESS_IMPORT = "com.oracle.truffle.espresso.nodes.quick.invoke.inline." + INLINED_FRAME_ACCESS;
private static final String INLINED_METHOD_PREDICATE = "InlinedMethodPredicate";
private static final String INLINED_METHOD_PREDICATE_IMPORT = "com.oracle.truffle.espresso.nodes.quick.invoke.inline." + INLINED_METHOD_PREDICATE;
    // Registers this processor for the substitutions package, generating
    // subclasses of JavaSubstitution.
    public SubstitutionProcessor() {
        super(SUBSTITUTION_PACKAGE, SUBSTITUTOR);
    }
    /**
     * Per-substitution bundle of everything the code generator needs:
     * guest-side identity (class, method name, parameter/return types) and
     * generation options (name provider, language filter, bytecode
     * inlining, guard, flags, collector group).
     */
    static class SubstitutorHelper extends SubstitutionHelper {
        // Internal name of the substituted guest class (e.g. "Ljava/lang/X;").
        final String targetClassName;
        // Name of the guest method being substituted.
        final String guestMethodName;
        // Guest-side parameter type names, in declaration order.
        final List<String> guestTypeNames;
        // Guest-side return type name.
        final String returnType;
        // Whether the substituted method has a receiver (non-static).
        final boolean hasReceiver;
        // Optional SubstitutionNamesProvider supplying alternate names.
        final TypeMirror nameProvider;
        // Optional filter restricting which languages use the substitution.
        final TypeMirror languageFilter;
        // Whether the substitution may be inlined directly in bytecode.
        final boolean inlineInBytecode;
        // Optional InlinedMethodPredicate guarding bytecode inlining.
        final TypeMirror guardValue;
        // Encoded substitution flags.
        final byte flags;
        // Collector group this substitution is registered under.
        final TypeMirror group;
        SubstitutorHelper(EspressoProcessor processor, Element target, String targetClassName, String guestMethodName, List<String> guestTypeNames, String returnType,
                        boolean hasReceiver, TypeMirror nameProvider, TypeMirror languageFilter, boolean inlineInBytecode, TypeMirror guardValue, TypeElement substitutionClass,
                        byte flags, TypeMirror group) {
            super(processor, target, processor.getTypeElement(SUBSTITUTION), substitutionClass);
            this.targetClassName = targetClassName;
            this.guestMethodName = guestMethodName;
            this.guestTypeNames = guestTypeNames;
            this.returnType = returnType;
            this.hasReceiver = hasReceiver;
            this.nameProvider = nameProvider;
            this.languageFilter = languageFilter;
            this.inlineInBytecode = inlineInBytecode;
            this.guardValue = guardValue;
            this.flags = flags;
            this.group = group;
        }
        @Override
        public TypeMirror getCollectTarget() {
            return group;
        }
    }
/**
 * Builds the Java source snippet that invokes the substitution target with
 * arguments {@code arg0..arg(nParameters-1)} plus any injected meta parameters.
 * The target is either the node's execute method (node substitutions) or a
 * static method on the enclosing class (method substitutions).
 */
private String extractInvocation(int nParameters, SubstitutorHelper helper) {
    StringBuilder invocation = new StringBuilder();
    if (helper.isNodeTarget()) {
        // Node substitution: delegate to the execute method of the attached node.
        ExecutableElement nodeExecute = findNodeExecute(helper.getNodeTarget());
        invocation.append("this.node.").append(nodeExecute.getSimpleName().toString()).append("(");
    } else {
        // Method substitution: static call on the enclosing substitution class.
        invocation.append(helper.getEnclosingClass().getQualifiedName().toString()) //
                        .append(".") //
                        .append(helper.getMethodTarget().getSimpleName().toString()) //
                        .append("(");
    }
    boolean first = true;
    for (int argIndex = 0; argIndex < nParameters; argIndex++) {
        first = checkFirst(invocation, first);
        invocation.append(ARG_NAME).append(argIndex);
    }
    // Append injected parameters (meta, context, ...) after the actual arguments.
    first = appendInvocationMetaInformation(invocation, first, helper);
    return invocation.append(")").toString();
}
/**
 * Renders a {@code new String[]{...}} literal containing the given type names,
 * indented so it can be embedded in the generated factory declaration.
 */
private static String generateParameterTypes(List<String> types, int tabulation) {
IndentingStringBuilder sb = new IndentingStringBuilder(0);
sb.appendLine("new String[]{");
sb.setIndentLevel(tabulation + 1);
for (String type : types) {
// One quoted entry per guest parameter type.
sb.append('\"').append(type).appendLine("\",");
}
sb.lowerIndentLevel();
sb.append('}');
return sb.toString();
}
/**
 * Processes one class annotated with {@code @EspressoSubstitutions}: resolves the name of
 * the substituted guest class (from the "Target_" naming scheme, or from the annotation's
 * {@code value} / {@code type} members, which are mutually exclusive), then processes each
 * enclosed substitution element.
 */
private void processElement(Element substitution) {
assert substitution.getKind() == ElementKind.CLASS;
TypeElement typeElement = (TypeElement) substitution;
AnnotationMirror annotation = getAnnotation(substitution, espressoSubstitutions);
assert annotation != null;
String className = typeElement.getSimpleName().toString();
// Extract the class name. (Of the form Target_[...]).
// Obtain the guest class that will be substituted.
String targetClassName = className;
if (className.startsWith("Target_")) {
// Simple default case: substitution is using the "Target_" scheme.
targetClassName = "L" + className.substring("Target_".length()).replace("_", "/") + ";";
}
int successfulScheme = 0;
// If it exists, collect the value of EspressoSubstitutions.value()
TypeMirror targetClass = getAnnotationValue(annotation, "value", TypeMirror.class);
assert targetClass != null; // Default value is EspressoSubstitutions.class
// If it exists, collect the value of EspressoSubstitutions.type()
String targetType = getAnnotationValue(annotation, "type", String.class);
if (!processingEnv.getTypeUtils().isSameType(targetClass, espressoSubstitutions.asType())) {
targetClassName = "L" + targetClass.toString().replace(".", "/") + ";";
successfulScheme++;
}
if (targetType != null && !targetType.isEmpty()) {
targetClassName = targetType;
successfulScheme++;
}
if (successfulScheme > 1) {
// 'value' and 'type' are two exclusive ways of naming the target class.
throw new AssertionError("Both 'value' and 'type' are specified for @EspressoSubstitution " + className);
}
// Get the name provider. Will override the previously obtained target class name.
TypeMirror defaultNameProvider = getNameProvider(annotation);
// The group to be used for the @Collect annotation
TypeMirror group = getAnnotationValue(annotation, "group", TypeMirror.class);
for (Element element : substitution.getEnclosedElements()) {
processSubstitution(element, className, defaultNameProvider, targetClassName, typeElement, group);
}
}
/**
 * Validates one substitution parameter or return type: primitives must not carry
 * {@code @JavaType}, and non-void reference types should be {@code StaticObject}.
 */
private void checkParameterOrReturnType(String headerMessage, TypeMirror typeMirror, Element element) {
    TypeKind kind = typeMirror.getKind();
    if (kind.isPrimitive()) {
        // A primitive determines its own guest type; annotating it is a user error.
        if (getAnnotation(typeMirror, javaType) != null) {
            processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR,
                            headerMessage + " (primitive type) cannot be annotated with @JavaType", element);
        }
        return;
    }
    if (kind == TypeKind.VOID) {
        // Nothing to validate for void.
        return;
    }
    // Reference type: guest references are exchanged as StaticObject.
    if (!processingEnv.getTypeUtils().isSameType(typeMirror, staticObject.asType())) {
        processingEnv.getMessager().printMessage(Diagnostic.Kind.WARNING,
                        headerMessage + " is not of type StaticObject", element);
    }
    // @JavaType annotation check is done in SubstitutionProcessor.getGuestTypes
}
/** Reports an error if a non-actual (injected) parameter's type lacks the {@code @Inject} annotation. */
private void checkInjectedParameter(String headerMessage, TypeMirror typeMirror, Element element) {
    boolean isAnnotated = getAnnotation(typeMirror, inject) != null;
    if (!isAnnotated) {
        processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR,
                        headerMessage + " must be annotated with @Inject", element);
    }
}
/** Validates every parameter (actual vs. injected) and the return type of a substitution target method. */
private void checkTargetMethod(ExecutableElement targetElement) {
    for (VariableElement param : targetElement.getParameters()) {
        // Actual parameters map to guest values; the rest must be injected.
        if (isActualParameter(param)) {
            checkParameterOrReturnType("Substitution parameter", param.asType(), param);
            continue;
        }
        checkInjectedParameter("Substitution parameter", param.asType(), param);
    }
    checkParameterOrReturnType("Substitution return type", targetElement.getReturnType(), targetElement);
}
/**
 * Sanity-checks a substitution element before code generation.
 * <p>
 * Methods must be static and neither private nor protected; node classes must be
 * accessible and their execute method (when found) is validated like a target method.
 * <p>
 * Fix: the class branch previously reported "Substitution method cannot be private nor
 * protected" for a CLASS element, which is misleading; it now names the node class.
 */
private void checkSubstitutionElement(Element element) {
    if (element.getKind() == ElementKind.METHOD) {
        ExecutableElement methodElement = (ExecutableElement) element;
        Set<Modifier> modifiers = methodElement.getModifiers();
        if (modifiers.contains(Modifier.PRIVATE) || modifiers.contains(Modifier.PROTECTED)) {
            processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Substitution method cannot be private nor protected", element);
        }
        if (!modifiers.contains(Modifier.STATIC)) {
            processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Substitution method must be static", element);
        }
        checkTargetMethod(methodElement);
    }
    if (element.getKind() == ElementKind.CLASS) {
        TypeElement typeElement = (TypeElement) element;
        Set<Modifier> modifiers = typeElement.getModifiers();
        if (modifiers.contains(Modifier.PRIVATE) || modifiers.contains(Modifier.PROTECTED)) {
            // Diagnostic corrected: this element is a node class, not a method.
            processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Substitution node class cannot be private nor protected", element);
        }
        ExecutableElement targetMethod = findNodeExecute(typeElement);
        if (targetMethod != null) {
            checkTargetMethod(targetMethod);
        }
    }
}
/**
 * Converts from CamelCase to lowerCamelCase by lower-casing the first code point
 * (code-point aware, so supplementary characters are handled correctly).
 */
private static String toLowerCamelCase(String s) {
    if (s.isEmpty()) {
        return s;
    }
    int firstCodePoint = s.codePointAt(0);
    StringBuilder result = new StringBuilder(s.length());
    result.appendCodePoint(Character.toLowerCase(firstCodePoint));
    // Append the remainder of the string unchanged.
    result.append(s, Character.charCount(firstCodePoint), s.length());
    return result.toString();
}
@Override
protected String getSubstutitutedMethodName(Element targetElement) {
// NOTE(review): the method name is misspelled ("Substutituted"), but it overrides the
// superclass method and cannot be renamed here alone.
AnnotationMirror subst = getAnnotation(targetElement, substitutionAnnotation);
String name = getAnnotationValue(subst, "methodName", String.class);
if (name.isEmpty()) {
if (targetElement.getKind() == ElementKind.CLASS) {
// If methodName is not specified, use camel case version of the class name.
// e.g. IsNull node -> isNull.
String elementName = targetElement.getSimpleName().toString();
name = toLowerCamelCase(elementName);
} else if (targetElement.getKind() == ElementKind.METHOD) {
// If methodName is not specified, use the target method name.
name = targetElement.getSimpleName().toString();
} else {
throw new AssertionError("Unexpected: " + targetElement);
}
}
return name;
}
/**
 * Processes one enclosed element of a substitution class. If the element carries a
 * {@code @Substitution} annotation, validates it, gathers all generation inputs (guest
 * signature, flags, inlining guard, name provider), then spawns and commits the
 * generated substitutor source file. Elements without the annotation are ignored.
 */
void processSubstitution(Element element, String className, TypeMirror defaultNameProvider, String targetClassName, TypeElement substitutionClass, TypeMirror group) {
assert element.getKind() == ElementKind.METHOD || element.getKind() == ElementKind.CLASS;
TypeElement declaringClass = (TypeElement) element.getEnclosingElement();
String targetPackage = env().getElementUtils().getPackageOf(declaringClass).getQualifiedName().toString();
// Class wide @InlineInBytecode annotation.
AnnotationMirror classWideInline = getAnnotation(declaringClass, inlineInBytecodeAnnotation);
// Find the methods annotated with @Substitution.
AnnotationMirror subst = getAnnotation(element, substitutionAnnotation);
if (subst != null) {
// Sanity check.
checkSubstitutionElement(element);
// Obtain the name of the element to be substituted in.
String targetMethodName = getSubstutitutedMethodName(element);
/*
* Obtain the actual target method to call in the substitution. This is the method that
* will be called in the substitution: Either element itself, for method substitutions.
* Or the execute method of the Truffle node, for node substitutions.
*/
ExecutableElement targetMethod = getTargetMethod(element);
// Obtain the host types of the parameters
List<String> espressoTypes = getEspressoTypes(targetMethod);
// Spawn the name of the Substitutor we will create.
String substitutorName = getSubstitutorClassName(className, element.getSimpleName().toString(), espressoTypes);
// Obtain the hasReceiver() value from the @Substitution annotation.
boolean hasReceiver = getAnnotationValue(subst, "hasReceiver", Boolean.class);
// Obtain the (fully qualified) guest types parameters of the element.
List<String> guestTypes = getGuestTypes(targetMethod, hasReceiver);
// Obtain the fully qualified guest return type of the element.
String returnType = getReturnTypeFromHost(targetMethod);
// Method-level name provider falls back to the class-wide default.
TypeMirror nameProvider = getNameProvider(subst);
nameProvider = nameProvider == null ? defaultNameProvider : nameProvider;
TypeMirror languageFilter = getLanguageFilter(subst);
// Fold the declared flag list into a single bit set.
List<Byte> flagsList = getAnnotationValueList(subst, "flags", Byte.class);
byte flags = 0;
for (Byte flag : flagsList) {
flags |= flag;
}
TypeMirror encodedInlineGuard = getInlineValue(classWideInline, element);
boolean inlineInBytecode = encodedInlineGuard != null ||
// Implicit inlining of trivial substitutions.
isFlag(flags, SubstitutionFlag.IsTrivial);
// The sentinel "no predicate" type decodes to null (unguarded inlining).
TypeMirror decodedInlineGuard = (encodedInlineGuard == null || processingEnv.getTypeUtils().isSameType(encodedInlineGuard, noPredicate.asType()))
? null
: encodedInlineGuard;
if (inlineInBytecode) {
flags |= SubstitutionFlag.InlineInBytecode;
}
SubstitutorHelper helper = new SubstitutorHelper(this, element, targetClassName, targetMethodName, guestTypes, returnType, hasReceiver, nameProvider, languageFilter,
inlineInBytecode, decodedInlineGuard, substitutionClass, flags, group);
// Create the contents of the source file
String classFile = spawnSubstitutor(
substitutorName,
targetPackage,
className,
targetMethodName,
espressoTypes, helper);
commitSubstitution(substitutionAnnotation, targetPackage, substitutorName, classFile);
}
}
/**
 * Reads the {@code nameProvider} member of the given annotation, returning {@code null}
 * when it is absent or set to the {@code NoProvider} sentinel (meaning "no provider").
 */
private TypeMirror getNameProvider(AnnotationMirror annotation) {
    TypeMirror provider = getAnnotationValue(annotation, "nameProvider", TypeMirror.class);
    boolean isRealProvider = provider != null && !processingEnv.getTypeUtils().isSameType(provider, noProvider.asType());
    return isRealProvider ? provider : null;
}
/** Reads the {@code languageFilter} member of a {@code @Substitution} annotation. */
private static TypeMirror getLanguageFilter(AnnotationMirror annotation) {
return getAnnotationValue(annotation, "languageFilter", TypeMirror.class);
}
/**
 * Returns a tri-state value describing bytecode-level inlining for the element. The
 * method-level {@code @InlineInBytecode} annotation takes precedence over the class-wide
 * one.
 * <ul>
 * <li>If {@code null}: No bytecode-level inlining for this substitution.</li>
 * <li>If equals {@code noPredicate}: No guard for a bytecode-level inlined substitution.</li>
 * <li>Else: Guarded bytecode-level inlining for this substitution.</li>
 * </ul>
 */
private TypeMirror getInlineValue(AnnotationMirror classWideAnnotation, Element element) {
AnnotationMirror inline = getAnnotation(element, inlineInBytecodeAnnotation);
inline = (inline == null) ? classWideAnnotation : inline;
if (inline == null) {
// No bytecode-level inlining.
return null;
}
return getAnnotationValue(inline, "guard", TypeMirror.class);
}
/**
 * Computes the guest internal name of the method's return type: taken from a
 * {@code @JavaType} annotation on the return type when present, otherwise derived
 * from the host type itself.
 */
String getReturnTypeFromHost(ExecutableElement method) {
TypeMirror returnType = method.getReturnType();
AnnotationMirror a = getAnnotation(returnType, javaType);
if (a != null) {
// The return type element points to the actual return type, not the specific usage,
// passing the method as anchor for reporting errors instead.
return getClassFromJavaType(a, method);
}
return getInternalName(returnType);
}
/**
 * Collects the guest internal names of the actual parameters of the target method.
 * Annotated parameters take their name from {@code @JavaType}; unannotated ones must be
 * primitives (or the receiver) — an unannotated non-receiver StaticObject is an error.
 */
private List<String> getGuestTypes(ExecutableElement inner, boolean hasReceiver) {
    ArrayList<String> guestTypes = new ArrayList<>();
    boolean expectingReceiver = hasReceiver;
    for (VariableElement parameter : inner.getParameters()) {
        if (!isActualParameter(parameter)) {
            // Injected parameters have no guest-visible type.
            continue;
        }
        AnnotationMirror javaTypeMirror = getAnnotation(parameter.asType(), javaType);
        if (javaTypeMirror != null) {
            guestTypes.add(getClassFromJavaType(javaTypeMirror, parameter));
        } else {
            // @JavaType annotation not found -> primitive or j.l.Object.
            // All StaticObject(s) parameters must be annotated with @JavaType.
            boolean isStaticObject = processingEnv.getTypeUtils().isSameType(parameter.asType(), staticObject.asType());
            if (!expectingReceiver && isStaticObject) {
                processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "non-receiver StaticObject parameters require the @JavaType annotation", parameter);
            }
            guestTypes.add(getInternalName(parameter.asType()));
        }
        // Only the first actual parameter can be the receiver.
        expectingReceiver = false;
    }
    return guestTypes;
}
/**
 * Checks that a string is a well-formed JVM internal type descriptor: a one-character
 * primitive/void descriptor, an array descriptor ('[' + valid component), or a
 * reference descriptor of the form {@code L<name>;} with a non-empty name.
 */
static boolean isValidInternalType(String internalName) {
    if (internalName.isEmpty()) {
        return false;
    }
    if (internalName.length() == 1) {
        // Single character: must be one of the primitive (or void) descriptors.
        return "BCDFIJSVZ".indexOf(internalName.charAt(0)) >= 0;
    }
    if (internalName.charAt(0) == '[') {
        // Array: strip one dimension and validate the component type.
        return isValidInternalType(internalName.substring(1));
    }
    // Reference type: "L" + name + ";" needs at least 3 characters.
    return internalName.length() >= 3 && internalName.charAt(0) == 'L' && internalName.endsWith(";");
}
/**
 * @param annotation @JavaType annotation
 * @param element element containing the @JavaType annotation for error reporting
 *
 * @return the fully qualified internal name of the guest class.
 */
private String getClassFromJavaType(AnnotationMirror annotation, Element element) {
String internalName = getAnnotationValue(annotation, "internalName", String.class);
// An explicit .internalName overrides .value.
if (internalName == null || internalName.isEmpty()) {
TypeMirror value = getAnnotationValue(annotation, "value", TypeMirror.class);
internalName = getInternalName(value);
// JavaType.value = JavaType.class is used as the "no type" type, forbid accidental
// usages.
if (processingEnv.getTypeUtils().isSameType(value, javaType.asType())) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Empty @JavaType, must specify a type", element, annotation);
}
}
// Reject malformed descriptors outright.
if (!isValidInternalType(internalName)) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Invalid internalName: " + internalName, element, annotation);
}
// . is allowed in type names by the spec, as part of the name, not as separator.
// This avoids a common error e.g. using . instead of / as separator.
if (internalName.contains(".")) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Invalid . in internalName: '" + internalName + "'. Use / instead e.g. Ljava/lang/String;", element, annotation);
}
return internalName;
}
/**
 * Given a type, returns its fully qualified internal name.
 *
 * In particular,
 * <li>Primitives (boolean, int) use their JVM signature (Z, I).
 * <li>Use "/" rather than "." to separate packages (/ex: java.lang.Object ->
 * Ljava/lang/Object;)
 * <li>Array types use "[" followed by the internal name of the component type.
 */
private String getInternalName(TypeMirror type) {
    // Peel array dimensions down to the elemental (non-array) type.
    int dimensions = 0;
    TypeMirror elemental = type;
    while (elemental.getKind() == TypeKind.ARRAY) {
        elemental = ((ArrayType) elemental).getComponentType();
        dimensions++;
    }
    String elementName = getNonArrayInternalName(elemental);
    return dimensions == 0 ? elementName : "[".repeat(dimensions) + elementName;
}
/**
 * Returns the internal name of a non-array type: the one-character descriptor for
 * primitives and void, or {@code L<binary-name-with-slashes>;} for declared types.
 */
private String getNonArrayInternalName(TypeMirror type) {
    TypeKind typeKind = type.getKind();
    assert typeKind != TypeKind.ARRAY;
    if (typeKind.isPrimitive() || typeKind == TypeKind.VOID) {
        switch (typeKind) {
            case BOOLEAN:
                return "Z";
            case BYTE:
                return "B";
            case CHAR:
                return "C";
            case SHORT:
                return "S";
            case INT:
                return "I";
            case FLOAT:
                return "F";
            case DOUBLE:
                return "D";
            case LONG:
                return "J";
            case VOID:
                return "V";
            default:
                throw new IllegalStateException("Unexpected primitive type kind: " + typeKind);
        }
    }
    if (typeKind != TypeKind.DECLARED) {
        throw new IllegalStateException("Unexpected type kind: " + typeKind);
    }
    TypeElement typeElement = (TypeElement) processingEnv.getTypeUtils().asElement(type);
    Name binaryName = processingEnv.getElementUtils().getBinaryName(typeElement);
    // In a binary name, '.' appears only as the package separator ('$' separates
    // nested classes), so a global replacement yields the internal form.
    return "L" + binaryName.toString().replace('.', '/') + ";";
}
/** Collects the simplified host type names of the target method's actual parameters. */
private List<String> getEspressoTypes(ExecutableElement inner) {
    List<String> espressoTypes = new ArrayList<>();
    for (VariableElement parameter : inner.getParameters()) {
        if (!isActualParameter(parameter)) {
            // Skip injected parameters.
            continue;
        }
        espressoTypes.add(extractSimpleType(parameter.asType().toString()));
    }
    return espressoTypes;
}
/** Only classes annotated with {@code @EspressoSubstitutions} are of interest to this processor. */
@Override
public Set<String> getSupportedAnnotationTypes() {
    return new HashSet<>(Set.of(ESPRESSO_SUBSTITUTIONS));
}
@Override
void processImpl(RoundEnvironment env) {
// Set up the different annotations, along with their values, that we will need.
this.espressoSubstitutions = getTypeElement(ESPRESSO_SUBSTITUTIONS);
this.substitutionAnnotation = getTypeElement(SUBSTITUTION);
this.inlineInBytecodeAnnotation = getTypeElement(INLINE_IN_BYTECODE);
this.javaType = getTypeElement(JAVA_TYPE);
this.noProvider = getTypeElement(NO_PROVIDER);
this.noPredicate = getTypeElement(INLINED_METHOD_PREDICATE_IMPORT);
// Fail fast if the annotation definitions drift from what this processor expects.
verifyAnnotationMembers(espressoSubstitutions, "value", "nameProvider");
verifyAnnotationMembers(substitutionAnnotation, "methodName", "nameProvider", "languageFilter");
verifyAnnotationMembers(inlineInBytecodeAnnotation, "guard");
verifyAnnotationMembers(javaType, "value", "internalName");
// Actual process
for (Element e : env.getElementsAnnotatedWith(espressoSubstitutions)) {
if (!e.getModifiers().contains(Modifier.FINAL)) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Enclosing class for substitutions must be final", e);
}
processElement(e);
}
}
/** Emits an error for every expected annotation member that the annotation type does not declare. */
private void verifyAnnotationMembers(TypeElement annotation, String... methods) {
    Set<String> declared = annotation.getEnclosedElements().stream() //
                    .filter(e -> e.getKind() == ElementKind.METHOD) //
                    .map(e -> e.getSimpleName().toString()) //
                    .collect(Collectors.toSet());
    for (String member : methods) {
        if (!declared.contains(member)) {
            processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Annotation is missing member: " + member, annotation);
        }
    }
}
/**
 * Computes the import list for the generated substitutor: the substitutor base class,
 * the frame machinery when bytecode-level inlining is generated, StaticObject when it
 * appears in the signature (or for the void NULL result), and the node class itself
 * for node substitutions.
 */
@Override
List<String> expectedImports(String className, String targetMethodName, List<String> parameterTypeName, SubstitutionHelper helper) {
    SubstitutorHelper h = (SubstitutorHelper) helper;
    List<String> imports = new ArrayList<>();
    imports.add(substitutorPackage + "." + SUBSTITUTOR);
    if (h.inlineInBytecode) {
        // invokeInlined(...) needs the frame types in scope.
        imports.add(VIRTUAL_FRAME_IMPORT);
        imports.add(INLINED_FRAME_ACCESS_IMPORT);
        if (!parameterTypeName.isEmpty()) {
            // Arguments are popped off the operand stack via EspressoFrame.
            imports.add(ESPRESSO_FRAME_IMPORT);
        }
    }
    if (parameterTypeName.contains("StaticObject") || h.returnType.equals("V")) {
        imports.add(IMPORT_STATIC_OBJECT);
    }
    if (h.isNodeTarget()) {
        imports.add(h.getNodeTarget().getQualifiedName().toString());
    }
    return imports;
}
/**
 * Emits the declaration of the factory field: a {@code new Factory(...)} expression whose
 * argument order must match the factory constructor documented below. When a name
 * provider is present it supplies the method and class names instead of the literals.
 */
@Override
FieldBuilder generateFactoryConstructor(FieldBuilder factoryBuilder, String substitutorName, String factoryType, String substitutorType, String targetMethodName, List<String> parameterTypeName,
SubstitutionHelper helper) {
/*- Calls:
new Factory(Object methodName,
Object substitutionClassName,
String returnType,
String[] parameterTypes,
boolean hasReceiver,
LanguageFilter filter,
byte flags,
InlinedMethodPredicate guard,
Supplier<? extends JavaSubstitution> factory);
*/
SubstitutorHelper h = (SubstitutorHelper) helper;
StatementBuilder declaration = new StatementBuilder();
declaration.addContent("new ", factoryType, "(").addLine().raiseIndent();
if (h.nameProvider == null) {
// Literal method and class names.
declaration.addContent(ProcessorUtils.stringify(h.guestMethodName), ',').addLine();
declaration.addContent(ProcessorUtils.stringify(h.targetClassName), ',').addLine();
} else {
// Names are resolved at runtime through the provider instance.
declaration.addContent(h.nameProvider, '.', INSTANCE, '.', GET_METHOD_NAME, '(', ProcessorUtils.stringify(h.guestMethodName), "),").addLine();
declaration.addContent(h.nameProvider, '.', INSTANCE, '.', SUBSTITUTION_CLASS_NAMES, "(),").addLine();
}
declaration.addContent(ProcessorUtils.stringify(h.returnType), ",").addLine();
declaration.addContent(generateParameterTypes(h.guestTypeNames, 4), ',').addLine();
declaration.addContent(h.hasReceiver, ',').addLine();
declaration.addContent(h.languageFilter, '.', INSTANCE, ',').addLine();
declaration.addContent("(byte) ", h.flags, ',').addLine();
// Null guard means unguarded (or no) bytecode-level inlining.
declaration.addContent(h.guardValue != null ? (h.guardValue + "." + INSTANCE) : "null", ',').addLine();
declaration.addContent(substitutorName + "::new").addLine();
declaration.lowerIndent().addContent(")");
factoryBuilder.withDeclaration(declaration);
return factoryBuilder;
}
/**
 * Generates the {@code invoke(Object[] args)} method of the substitutor: casts each
 * argument to its host type, calls the substitution target, and returns the result
 * (StaticObject.NULL for void targets). Also generates {@code invokeInlined} when
 * bytecode-level inlining is enabled.
 */
@Override
ClassBuilder generateInvoke(ClassBuilder classBuilder, String className, String targetMethodName, List<String> parameterTypeName, SubstitutionHelper helper) {
SubstitutorHelper h = (SubstitutorHelper) helper;
MethodBuilder invoke = new MethodBuilder("invoke") //
.withOverrideAnnotation() //
.withModifiers(new ModifierBuilder().asPublic().asFinal()) //
.withParams("Object[] " + ARGS_NAME) //
.withReturnType("Object");
int argIndex = 0;
// Unbox/cast each boxed argument into a local of the declared host type.
for (String argType : parameterTypeName) {
invoke.addBodyLine(argType, " ", ARG_NAME, argIndex, " = ", castTo(ARGS_NAME + "[" + argIndex + "]", argType), ";");
argIndex++;
}
setEspressoContextVar(invoke, helper);
if (h.returnType.equals("V")) {
// Void guest methods return the guest null from invoke().
invoke.addBodyLine(extractInvocation(argIndex, h).trim(), ";\n");
invoke.addBodyLine("return StaticObject.NULL;");
} else {
invoke.addBodyLine("return ", extractInvocation(argIndex, h).trim(), ";\n");
}
classBuilder.withMethod(invoke);
if (h.inlineInBytecode) {
return generateInvokeInlined(classBuilder, parameterTypeName, helper);
}
return classBuilder;
}
/**
 * Generates {@code invokeInlined(VirtualFrame, int, InlinedFrameAccess)}: pops the
 * arguments off the operand stack in reverse order (accounting for 2-slot longs and
 * doubles), invokes the target, and pushes the result (nothing for void targets).
 */
@SuppressWarnings("fallthrough")
private ClassBuilder generateInvokeInlined(ClassBuilder classBuilder, List<String> parameterTypeName, SubstitutionHelper helper) {
SubstitutorHelper h = (SubstitutorHelper) helper;
MethodBuilder invoke = new MethodBuilder("invokeInlined") //
.withOverrideAnnotation() //
.withModifiers(new ModifierBuilder().asPublic().asFinal()) //
.withParams("VirtualFrame frame", "int top", "InlinedFrameAccess frameAccess") //
.withReturnType("void");
// Offset from 'top' of the slot being popped; grows by each argument's slot count.
int delta = 1;
int argCount = parameterTypeName.size();
// Arguments sit on the stack left-to-right, so pop them last-to-first.
for (int argIndex = argCount - 1; argIndex >= 0; argIndex--) {
String argType = parameterTypeName.get(argIndex);
String popMethod;
boolean doCast = false;
int slotCount = 1;
switch (argType) {
// Sub-int types are stored as ints on the operand stack and need a narrowing cast.
case "byte":
case "boolean":
case "char":
case "short":
doCast = true;
// fall through
case "int":
popMethod = "popInt";
break;
case "float":
popMethod = "popFloat";
break;
case "long":
// Category-2 type: occupies two stack slots.
slotCount = 2;
popMethod = "popLong";
break;
case "double":
slotCount = 2;
popMethod = "popDouble";
break;
default:
popMethod = "popObject";
break;
}
if (argType.equals("boolean")) {
// Booleans are encoded as ints on the stack; convert via != 0.
invoke.addBodyLine(argType, " ", ARG_NAME, argIndex, " = ", ESPRESSO_FRAME + "." + popMethod + "(frame, top - " + delta + ") != 0", ";");
} else {
String cast = doCast ? "(" + argType + ") " : "";
invoke.addBodyLine(argType, " ", ARG_NAME, argIndex, " = ", cast, ESPRESSO_FRAME + "." + popMethod + "(frame, top - " + delta + ")", ";");
}
delta += slotCount;
}
setEspressoContextVar(invoke, helper);
if (h.returnType.equals("V")) {
invoke.addBodyLine(extractInvocation(argCount, h).trim(), ";");
} else {
invoke.addBodyLine("frameAccess.pushResult(frame, ", extractResultToPush(extractInvocation(argCount, h).trim(), h), ");");
}
return classBuilder.withMethod(invoke);
}
/**
 * Wraps the invocation expression for pushing onto the operand stack: guest booleans
 * ("Z") are pushed as ints, so they are converted via a ternary; everything else is
 * pushed as-is.
 */
private static String extractResultToPush(String invocation, SubstitutorHelper h) {
    if (!h.returnType.equals("Z")) {
        return invocation;
    }
    return "(" + invocation + ") ? 1 : 0";
}
/** Returns {@code true} iff any bit of {@code flag} is set in {@code flags}. */
private static boolean isFlag(byte flags, byte flag) {
    int masked = flags & flag;
    return masked != 0;
}
}
|
googleapis/google-cloud-java | 36,091 | java-alloydb/proto-google-cloud-alloydb-v1alpha/src/main/java/com/google/cloud/alloydb/v1alpha/MigrationSource.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/alloydb/v1alpha/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.alloydb.v1alpha;
/**
*
*
* <pre>
* Subset of the source instance configuration that is available when reading
* the cluster resource.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1alpha.MigrationSource}
*/
public final class MigrationSource extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1alpha.MigrationSource)
MigrationSourceOrBuilder {
private static final long serialVersionUID = 0L;
// Use MigrationSource.newBuilder() to construct.
// Invoked by the Builder; message state is copied in by the superclass.
private MigrationSource(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Initializes field defaults for instances created via newInstance()/parsing.
private MigrationSource() {
hostPort_ = "";
referenceId_ = "";
sourceType_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
// Called reflectively by the protobuf runtime to create fresh instances.
return new MigrationSource();
}
/** Returns the protobuf descriptor for the {@code MigrationSource} message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1alpha.ResourcesProto
.internal_static_google_cloud_alloydb_v1alpha_MigrationSource_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
// Binds descriptor fields to this generated class and its Builder.
return com.google.cloud.alloydb.v1alpha.ResourcesProto
.internal_static_google_cloud_alloydb_v1alpha_MigrationSource_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1alpha.MigrationSource.class,
com.google.cloud.alloydb.v1alpha.MigrationSource.Builder.class);
}
/**
*
*
* <pre>
* Denote the type of migration source that created this cluster.
* </pre>
*
* Protobuf enum {@code google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType}
*/
public enum MigrationSourceType implements com.google.protobuf.ProtocolMessageEnum {
/**
 *
 *
 * <pre>
 * Migration source is unknown.
 * </pre>
 *
 * <code>MIGRATION_SOURCE_TYPE_UNSPECIFIED = 0;</code>
 */
MIGRATION_SOURCE_TYPE_UNSPECIFIED(0),
/**
 *
 *
 * <pre>
 * DMS source means the cluster was created via DMS migration job.
 * </pre>
 *
 * <code>DMS = 1;</code>
 */
DMS(1),
// Sentinel for wire values not known to this generated code version.
UNRECOGNIZED(-1),
;
/**
 *
 *
 * <pre>
 * Migration source is unknown.
 * </pre>
 *
 * <code>MIGRATION_SOURCE_TYPE_UNSPECIFIED = 0;</code>
 */
public static final int MIGRATION_SOURCE_TYPE_UNSPECIFIED_VALUE = 0;
/**
 *
 *
 * <pre>
 * DMS source means the cluster was created via DMS migration job.
 * </pre>
 *
 * <code>DMS = 1;</code>
 */
public static final int DMS_VALUE = 1;
public final int getNumber() {
// UNRECOGNIZED carries no defined wire number.
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static MigrationSourceType valueOf(int value) {
return forNumber(value);
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 */
public static MigrationSourceType forNumber(int value) {
switch (value) {
case 0:
return MIGRATION_SOURCE_TYPE_UNSPECIFIED;
case 1:
return DMS;
default:
// Unknown wire value; callers map this to UNRECOGNIZED.
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<MigrationSourceType>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<MigrationSourceType>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<MigrationSourceType>() {
public MigrationSourceType findValueByNumber(int number) {
return MigrationSourceType.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
// UNRECOGNIZED has no descriptor entry.
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.alloydb.v1alpha.MigrationSource.getDescriptor().getEnumTypes().get(0);
}
private static final MigrationSourceType[] VALUES = values();
public static MigrationSourceType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private MigrationSourceType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType)
}
public static final int HOST_PORT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object hostPort_ = "";
/**
*
*
* <pre>
* Output only. The host and port of the on-premises instance in host:port
* format
* </pre>
*
* <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The hostPort.
*/
@java.lang.Override
public java.lang.String getHostPort() {
java.lang.Object ref = hostPort_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Field was parsed as bytes; decode once and cache the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
hostPort_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. The host and port of the on-premises instance in host:port
* format
* </pre>
*
* <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for hostPort.
*/
@java.lang.Override
public com.google.protobuf.ByteString getHostPortBytes() {
java.lang.Object ref = hostPort_;
if (ref instanceof java.lang.String) {
// Field currently cached as a String; encode once and cache the bytes.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
hostPort_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REFERENCE_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object referenceId_ = "";
/**
*
*
* <pre>
* Output only. Place holder for the external source identifier(e.g DMS job
* name) that created the cluster.
* </pre>
*
* <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The referenceId.
*/
@java.lang.Override
public java.lang.String getReferenceId() {
java.lang.Object ref = referenceId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Field was parsed as bytes; decode once and cache the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
referenceId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. Place holder for the external source identifier(e.g DMS job
* name) that created the cluster.
* </pre>
*
* <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for referenceId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getReferenceIdBytes() {
java.lang.Object ref = referenceId_;
if (ref instanceof java.lang.String) {
// Field currently cached as a String; encode once and cache the bytes.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
referenceId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SOURCE_TYPE_FIELD_NUMBER = 3;
private int sourceType_ = 0;
/**
*
*
* <pre>
* Output only. Type of migration source.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for sourceType.
*/
@java.lang.Override
public int getSourceTypeValue() {
return sourceType_;
}
/**
*
*
* <pre>
* Output only. Type of migration source.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The sourceType.
*/
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType getSourceType() {
com.google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType result =
com.google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType.forNumber(sourceType_);
return result == null
? com.google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType.UNRECOGNIZED
: result;
}
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // memoizedIsInitialized encoding: -1 = not yet computed,
    // 0 = known uninitialized, 1 = known initialized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message declares no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 wire format: fields holding their default value are omitted.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostPort_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, hostPort_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(referenceId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, referenceId_);
    }
    if (sourceType_
        != com.google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType
            .MIGRATION_SOURCE_TYPE_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(3, sourceType_);
    }
    // Round-trip any fields that were unknown when this message was parsed.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirror writeTo(): only non-default fields contribute to the size.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostPort_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, hostPort_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(referenceId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, referenceId_);
    }
    if (sourceType_
        != com.google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType
            .MIGRATION_SOURCE_TYPE_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, sourceType_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    // Non-MigrationSource instances are delegated to the superclass check.
    if (!(obj instanceof com.google.cloud.alloydb.v1alpha.MigrationSource)) {
      return super.equals(obj);
    }
    com.google.cloud.alloydb.v1alpha.MigrationSource other =
        (com.google.cloud.alloydb.v1alpha.MigrationSource) obj;
    // Field-by-field comparison, including unknown fields, so two messages
    // are equal iff their wire representations carry the same data.
    if (!getHostPort().equals(other.getHostPort())) return false;
    if (!getReferenceId().equals(other.getReferenceId())) return false;
    if (sourceType_ != other.sourceType_) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode == 0 is the "not yet computed" sentinel; the
    // generated mixing below is presumably structured so a computed hash is
    // never 0 in practice — standard protobuf-generated behavior.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Mix the descriptor plus each field, keyed by its field number,
    // consistent with equals() above.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + HOST_PORT_FIELD_NUMBER;
    hash = (53 * hash) + getHostPort().hashCode();
    hash = (37 * hash) + REFERENCE_ID_FIELD_NUMBER;
    hash = (53 * hash) + getReferenceId().hashCode();
    hash = (37 * hash) + SOURCE_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + sourceType_;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static convenience parsers. Every overload delegates to PARSER; they
  // differ only in the input source (ByteBuffer, ByteString, byte[],
  // InputStream, delimited InputStream, CodedInputStream) and optional
  // extension registry.
  public static com.google.cloud.alloydb.v1alpha.MigrationSource parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.alloydb.v1alpha.MigrationSource parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.alloydb.v1alpha.MigrationSource parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.alloydb.v1alpha.MigrationSource parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.alloydb.v1alpha.MigrationSource parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.alloydb.v1alpha.MigrationSource parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.alloydb.v1alpha.MigrationSource parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.alloydb.v1alpha.MigrationSource parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the
  // message bytes, allowing several messages on one stream.
  public static com.google.cloud.alloydb.v1alpha.MigrationSource parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.alloydb.v1alpha.MigrationSource parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.alloydb.v1alpha.MigrationSource parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.alloydb.v1alpha.MigrationSource parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Creates a builder initialized with all-default field values.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated from an existing message.
  public static Builder newBuilder(com.google.cloud.alloydb.v1alpha.MigrationSource prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; anything else seeds the
    // builder with this message's field values.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Subset of the source instance configuration that is available when reading
* the cluster resource.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1alpha.MigrationSource}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1alpha.MigrationSource)
com.google.cloud.alloydb.v1alpha.MigrationSourceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1alpha.ResourcesProto
.internal_static_google_cloud_alloydb_v1alpha_MigrationSource_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1alpha.ResourcesProto
.internal_static_google_cloud_alloydb_v1alpha_MigrationSource_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1alpha.MigrationSource.class,
com.google.cloud.alloydb.v1alpha.MigrationSource.Builder.class);
}
// Construct using com.google.cloud.alloydb.v1alpha.MigrationSource.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
hostPort_ = "";
referenceId_ = "";
sourceType_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.alloydb.v1alpha.ResourcesProto
.internal_static_google_cloud_alloydb_v1alpha_MigrationSource_descriptor;
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.MigrationSource getDefaultInstanceForType() {
return com.google.cloud.alloydb.v1alpha.MigrationSource.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.MigrationSource build() {
com.google.cloud.alloydb.v1alpha.MigrationSource result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.MigrationSource buildPartial() {
com.google.cloud.alloydb.v1alpha.MigrationSource result =
new com.google.cloud.alloydb.v1alpha.MigrationSource(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    private void buildPartial0(com.google.cloud.alloydb.v1alpha.MigrationSource result) {
      // Copy only the fields whose presence bits are set in the builder so
      // untouched fields keep the message defaults.
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.hostPort_ = hostPort_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.referenceId_ = referenceId_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.sourceType_ = sourceType_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.alloydb.v1alpha.MigrationSource) {
return mergeFrom((com.google.cloud.alloydb.v1alpha.MigrationSource) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges a MigrationSource into this builder using proto3 semantics:
    // non-default scalar fields of {@code other} overwrite this builder's
    // values; default (empty/zero) fields are ignored.
    public Builder mergeFrom(com.google.cloud.alloydb.v1alpha.MigrationSource other) {
      // Merging the default instance is a no-op.
      if (other == com.google.cloud.alloydb.v1alpha.MigrationSource.getDefaultInstance())
        return this;
      if (!other.getHostPort().isEmpty()) {
        hostPort_ = other.hostPort_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getReferenceId().isEmpty()) {
        referenceId_ = other.referenceId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.sourceType_ != 0) {
        setSourceTypeValue(other.getSourceTypeValue());
      }
      // Unknown fields from both messages are preserved.
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Wire tag = (field_number << 3) | wire_type; 0 marks end of input.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (host_port), length-delimited UTF-8 string.
                hostPort_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2 (reference_id), length-delimited UTF-8 string.
                referenceId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                // Field 3 (source_type), varint enum value.
                sourceType_ = input.readEnum();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                // Unrecognized tags are stashed in the unknown-field set.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even when parsing aborts part-way.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object hostPort_ = "";
/**
*
*
* <pre>
* Output only. The host and port of the on-premises instance in host:port
* format
* </pre>
*
* <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The hostPort.
*/
public java.lang.String getHostPort() {
java.lang.Object ref = hostPort_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
hostPort_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Output only. The host and port of the on-premises instance in host:port
* format
* </pre>
*
* <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for hostPort.
*/
public com.google.protobuf.ByteString getHostPortBytes() {
java.lang.Object ref = hostPort_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
hostPort_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Output only. The host and port of the on-premises instance in host:port
* format
* </pre>
*
* <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The hostPort to set.
* @return This builder for chaining.
*/
public Builder setHostPort(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
hostPort_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The host and port of the on-premises instance in host:port
* format
* </pre>
*
* <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return This builder for chaining.
*/
public Builder clearHostPort() {
hostPort_ = getDefaultInstance().getHostPort();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The host and port of the on-premises instance in host:port
* format
* </pre>
*
* <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The bytes for hostPort to set.
* @return This builder for chaining.
*/
public Builder setHostPortBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
hostPort_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object referenceId_ = "";
/**
*
*
* <pre>
* Output only. Place holder for the external source identifier(e.g DMS job
* name) that created the cluster.
* </pre>
*
* <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The referenceId.
*/
public java.lang.String getReferenceId() {
java.lang.Object ref = referenceId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
referenceId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Output only. Place holder for the external source identifier(e.g DMS job
* name) that created the cluster.
* </pre>
*
* <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for referenceId.
*/
public com.google.protobuf.ByteString getReferenceIdBytes() {
java.lang.Object ref = referenceId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
referenceId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Output only. Place holder for the external source identifier(e.g DMS job
* name) that created the cluster.
* </pre>
*
* <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The referenceId to set.
* @return This builder for chaining.
*/
public Builder setReferenceId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
referenceId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Place holder for the external source identifier(e.g DMS job
* name) that created the cluster.
* </pre>
*
* <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return This builder for chaining.
*/
public Builder clearReferenceId() {
referenceId_ = getDefaultInstance().getReferenceId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Place holder for the external source identifier(e.g DMS job
* name) that created the cluster.
* </pre>
*
* <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The bytes for referenceId to set.
* @return This builder for chaining.
*/
public Builder setReferenceIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
referenceId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int sourceType_ = 0;
/**
*
*
* <pre>
* Output only. Type of migration source.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for sourceType.
*/
@java.lang.Override
public int getSourceTypeValue() {
return sourceType_;
}
/**
*
*
* <pre>
* Output only. Type of migration source.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @param value The enum numeric value on the wire for sourceType to set.
* @return This builder for chaining.
*/
public Builder setSourceTypeValue(int value) {
sourceType_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Type of migration source.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The sourceType.
*/
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType getSourceType() {
com.google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType result =
com.google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType.forNumber(
sourceType_);
return result == null
? com.google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* Output only. Type of migration source.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @param value The sourceType to set.
* @return This builder for chaining.
*/
public Builder setSourceType(
com.google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
sourceType_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Type of migration source.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearSourceType() {
bitField0_ = (bitField0_ & ~0x00000004);
sourceType_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1alpha.MigrationSource)
}
// @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1alpha.MigrationSource)
private static final com.google.cloud.alloydb.v1alpha.MigrationSource DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1alpha.MigrationSource();
}
public static com.google.cloud.alloydb.v1alpha.MigrationSource getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<MigrationSource> PARSER =
new com.google.protobuf.AbstractParser<MigrationSource>() {
@java.lang.Override
public MigrationSource parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<MigrationSource> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<MigrationSource> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.MigrationSource getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/ozone | 36,321 | hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/hdds/upgrade/TestHDDSUpgrade.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdds.upgrade;
import static java.lang.Thread.sleep;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_HEARTBEAT_INTERVAL;
import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PIPELINE_REPORT_INTERVAL;
import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerDataProto.State.CLOSED;
import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.HEALTHY;
import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.HEALTHY_READONLY;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_DATANODE_PIPELINE_LIMIT;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_RATIS_PIPELINE_LIMIT;
import static org.apache.hadoop.hdds.scm.pipeline.Pipeline.PipelineState.OPEN;
import static org.apache.hadoop.ozone.upgrade.InjectedUpgradeFinalizationExecutor.UpgradeTestInjectionPoints.AFTER_COMPLETE_FINALIZATION;
import static org.apache.hadoop.ozone.upgrade.InjectedUpgradeFinalizationExecutor.UpgradeTestInjectionPoints.AFTER_POST_FINALIZE_UPGRADE;
import static org.apache.hadoop.ozone.upgrade.InjectedUpgradeFinalizationExecutor.UpgradeTestInjectionPoints.AFTER_PRE_FINALIZE_UPGRADE;
import static org.apache.hadoop.ozone.upgrade.InjectedUpgradeFinalizationExecutor.UpgradeTestInjectionPoints.BEFORE_PRE_FINALIZE_UPGRADE;
import static org.apache.hadoop.ozone.upgrade.UpgradeFinalization.Status.ALREADY_FINALIZED;
import static org.apache.hadoop.ozone.upgrade.UpgradeFinalization.Status.FINALIZATION_DONE;
import static org.apache.hadoop.ozone.upgrade.UpgradeFinalization.Status.FINALIZATION_REQUIRED;
import static org.apache.hadoop.ozone.upgrade.UpgradeFinalization.Status.STARTING_FINALIZATION;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import org.apache.hadoop.hdds.client.ReplicationConfig;
import org.apache.hadoop.hdds.client.ReplicationFactor;
import org.apache.hadoop.hdds.client.ReplicationType;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.XceiverClientManager;
import org.apache.hadoop.hdds.scm.XceiverClientSpi;
import org.apache.hadoop.hdds.scm.container.ContainerInfo;
import org.apache.hadoop.hdds.scm.container.ContainerManager;
import org.apache.hadoop.hdds.scm.exceptions.SCMException;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
import org.apache.hadoop.hdds.scm.pipeline.PipelineManager;
import org.apache.hadoop.hdds.scm.server.SCMConfigurator;
import org.apache.hadoop.hdds.scm.server.SCMStorageConfig;
import org.apache.hadoop.hdds.scm.server.StorageContainerManager;
import org.apache.hadoop.hdds.scm.server.upgrade.SCMUpgradeFinalizationContext;
import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
import org.apache.hadoop.ozone.HddsDatanodeService;
import org.apache.hadoop.ozone.MiniOzoneCluster;
import org.apache.hadoop.ozone.MiniOzoneClusterProvider;
import org.apache.hadoop.ozone.MiniOzoneHAClusterImpl;
import org.apache.hadoop.ozone.UniformDatanodesFactory;
import org.apache.hadoop.ozone.client.ObjectStore;
import org.apache.hadoop.ozone.client.OzoneClient;
import org.apache.hadoop.ozone.client.OzoneClientFactory;
import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine;
import org.apache.hadoop.ozone.om.OMStorage;
import org.apache.hadoop.ozone.om.upgrade.OMLayoutFeature;
import org.apache.hadoop.ozone.upgrade.BasicUpgradeFinalizer;
import org.apache.hadoop.ozone.upgrade.InjectedUpgradeFinalizationExecutor;
import org.apache.hadoop.ozone.upgrade.InjectedUpgradeFinalizationExecutor.UpgradeTestInjectionPoints;
import org.apache.hadoop.ozone.upgrade.UpgradeFinalization.StatusAndMessages;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.ozone.test.LambdaTestUtils;
import org.apache.ozone.test.tag.Flaky;
import org.apache.ozone.test.tag.Slow;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Test SCM and DataNode Upgrade sequence.
*/
@Flaky({"HDDS-6028", "HDDS-6049"})
@Slow
public class TestHDDSUpgrade {
/**
* Set a timeout for each test.
*/
private static final Logger LOG =
LoggerFactory.getLogger(TestHDDSUpgrade.class);
private static final int NUM_DATA_NODES = 3;
private static final int NUM_SCMS = 3;
private MiniOzoneHAClusterImpl cluster;
private OzoneConfiguration conf;
private StorageContainerManager scm;
private ContainerManager scmContainerManager;
private PipelineManager scmPipelineManager;
private static final int NUM_CONTAINERS_CREATED = 1;
private HDDSLayoutVersionManager scmVersionManager;
private AtomicBoolean testPassed = new AtomicBoolean(true);
private static
InjectedUpgradeFinalizationExecutor<SCMUpgradeFinalizationContext>
scmFinalizationExecutor;
private static final ReplicationConfig RATIS_THREE =
ReplicationConfig.fromProtoTypeAndFactor(HddsProtos.ReplicationType.RATIS,
HddsProtos.ReplicationFactor.THREE);
private static MiniOzoneClusterProvider clusterProvider;
  /**
   * Acquire a MiniOzoneCluster for the test (the "MiniDFSCluster" wording in
   * the original comment was a stale copy-paste; this is an Ozone cluster
   * taken from the shared {@code clusterProvider}).
   *
   * @throws Exception if the cluster cannot be provided
   */
  @BeforeEach
  public void setUp() throws Exception {
    init();
  }
  // Returns the cluster to the provider after each test; see shutdown().
  @AfterEach
  public void tearDown() throws Exception {
    shutdown();
  }
  /**
   * One-time setup: build the cluster configuration (fast heartbeats and
   * pipeline reports for quick state convergence, layout versions pinned to
   * INITIAL_VERSION so tests start pre-finalization) and create the shared
   * cluster provider used by all tests in this class.
   */
  @BeforeAll
  public static void initClass() {
    OzoneConfiguration conf = new OzoneConfiguration();
    conf.setTimeDuration(HDDS_PIPELINE_REPORT_INTERVAL, 1000,
        TimeUnit.MILLISECONDS);
    conf.setInt(OZONE_DATANODE_PIPELINE_LIMIT, 1);
    // allow only one FACTOR THREE pipeline.
    conf.setInt(OZONE_SCM_RATIS_PIPELINE_LIMIT, NUM_DATA_NODES + 1);
    // Start SCM and OM at the initial metadata layout version so the
    // upgrade/finalization path is actually exercised.
    conf.setInt(SCMStorageConfig.TESTING_INIT_LAYOUT_VERSION_KEY, HDDSLayoutFeature.INITIAL_VERSION.layoutVersion());
    conf.setInt(OMStorage.TESTING_INIT_LAYOUT_VERSION_KEY, OMLayoutFeature.INITIAL_VERSION.layoutVersion());
    conf.setTimeDuration(HDDS_HEARTBEAT_INTERVAL, 500, TimeUnit.MILLISECONDS);
    conf.setTimeDuration(OZONE_SCM_HEARTBEAT_PROCESS_INTERVAL, 500, TimeUnit.MILLISECONDS);
    // The injected executor lets tests pause/abort SCM finalization at
    // chosen injection points.
    scmFinalizationExecutor = new InjectedUpgradeFinalizationExecutor<>();
    SCMConfigurator scmConfigurator = new SCMConfigurator();
    scmConfigurator.setUpgradeFinalizationExecutor(scmFinalizationExecutor);
    MiniOzoneHAClusterImpl.Builder builder = MiniOzoneCluster.newHABuilder(conf);
    builder.setNumOfStorageContainerManagers(NUM_SCMS)
        .setSCMConfigurator(scmConfigurator)
        .setNumDatanodes(NUM_DATA_NODES)
        .setDatanodeFactory(UniformDatanodesFactory.newBuilder()
            .setLayoutVersion(HDDSLayoutFeature.INITIAL_VERSION.layoutVersion())
            .build());
    // Setting the provider to a max of 100 clusters. Some of the tests here
    // use multiple clusters, so its hard to know exactly how many will be
    // needed. This means the provider will create 1 extra cluster than needed
    // but that will not greatly affect runtimes.
    clusterProvider = new MiniOzoneClusterProvider(builder, 100);
  }
  // One-time teardown: stop the provider and any clusters it still holds.
  @AfterAll
  public static void afterClass() throws InterruptedException {
    clusterProvider.shutdown();
  }
  // Takes the next pre-built HA cluster from the provider and caches its
  // configuration and SCM handles for the test body.
  public void init() throws Exception {
    cluster = (MiniOzoneHAClusterImpl) clusterProvider.provide();
    conf = cluster.getConf();
    loadSCMState();
  }
  /**
   * Shutdown the MiniOzoneCluster by handing it back to the provider, which
   * is responsible for destroying it. Safe to call when init() failed and
   * {@code cluster} is still null.
   */
  public void shutdown() throws IOException, InterruptedException {
    if (cluster != null) {
      clusterProvider.destroy(cluster);
    }
  }
  /*
   * Some tests repeatedly modify the cluster. Helper function to reload the
   * latest SCM state: re-reads the (possibly new) leader SCM and refreshes
   * the container, pipeline and layout-version manager handles.
   */
  private void loadSCMState() {
    scm = cluster.getStorageContainerManager();
    scmContainerManager = scm.getContainerManager();
    scmPipelineManager = scm.getPipelineManager();
    scmVersionManager = scm.getLayoutVersionManager();
  }
/*
 * Helper function to create a Key: makes a volume and bucket named
 * "testhddsupgrade" and writes a small payload through the RPC client,
 * exercising the full write path.
 *
 * @throws IOException on any client/write failure
 */
private void createKey() throws IOException {
  final String uniqueId = "testhddsupgrade";
  try (OzoneClient client = OzoneClientFactory.getRpcClient(conf)) {
    ObjectStore objectStore = client.getObjectStore();
    objectStore.createVolume(uniqueId);
    objectStore.getVolume(uniqueId).createBucket(uniqueId);
    // try-with-resources guarantees the key stream is closed even when
    // write/flush throws; the original leaked the stream on failure.
    try (OzoneOutputStream key =
        objectStore.getVolume(uniqueId).getBucket(uniqueId)
            .createKey(uniqueId, 1024, ReplicationType.RATIS,
                ReplicationFactor.THREE, new HashMap<>())) {
      key.write(uniqueId.getBytes(UTF_8));
      key.flush();
    }
  }
}
/*
 * Helper function to test that we can create new pipelines Post-Upgrade.
 * Creates a RATIS/THREE pipeline, opens it, and verifies that a newly
 * allocated container lands on that (only empty) pipeline.
 */
private void testPostUpgradePipelineCreation()
throws IOException, TimeoutException {
Pipeline ratisPipeline1 = scmPipelineManager.createPipeline(RATIS_THREE);
scmPipelineManager.openPipeline(ratisPipeline1.getId());
// Freshly created pipeline starts with no containers.
assertEquals(0,
scmPipelineManager.getNumberOfContainers(ratisPipeline1.getId()));
PipelineID pid = scmContainerManager.allocateContainer(RATIS_THREE,
"Owner1").getPipelineID();
// The allocation should have been placed on the new pipeline.
assertEquals(1, scmPipelineManager.getNumberOfContainers(pid));
assertEquals(pid, ratisPipeline1.getId());
}
/*
 * Blocks until exactly one OPEN RATIS/THREE pipeline exists, polling every
 * 500 ms for at most 10 seconds.
 */
private void waitForPipelineCreated() throws Exception {
  LambdaTestUtils.await(10000, 500,
      () -> scmPipelineManager.getPipelines(RATIS_THREE, OPEN).size() == 1);
}
/*
 * Helper function for container creation: allocates a RATIS/THREE container,
 * opens its pipeline, and creates the container on the datanodes.
 *
 * @throws IOException      on allocation/creation failure
 * @throws TimeoutException if opening the pipeline times out
 */
private void createTestContainers() throws IOException, TimeoutException {
  // NOTE(review): XceiverClientManager holds client connections; the
  // original never closed it, leaking client resources per invocation.
  // Closing via try-with-resources assumes it implements Closeable — it
  // does in current Ozone; confirm against the version in use.
  try (XceiverClientManager xceiverClientManager =
      new XceiverClientManager(conf)) {
    ContainerInfo ci1 = scmContainerManager.allocateContainer(
        RATIS_THREE, "Owner1");
    Pipeline ratisPipeline1 =
        scmPipelineManager.getPipeline(ci1.getPipelineID());
    scmPipelineManager.openPipeline(ratisPipeline1.getId());
    XceiverClientSpi client1 =
        xceiverClientManager.acquireClient(ratisPipeline1);
    ContainerProtocolCalls.createContainer(client1,
        ci1.getContainerID(), null);
    xceiverClientManager.releaseClient(client1, false);
  }
}
/*
 * Happy Path Test Case: finalize the upgrade with no failure injection and
 * verify pre/post-upgrade invariants on SCM and DataNodes, that pre-upgrade
 * pipelines were closed, and that the cluster is usable afterwards.
 */
@Test
public void testFinalizationFromInitialVersionToLatestVersion()
throws Exception {
waitForPipelineCreated();
createTestContainers();
// Test the Pre-Upgrade conditions on SCM as well as DataNodes.
TestHddsUpgradeUtils.testPreUpgradeConditionsSCM(
cluster.getStorageContainerManagersList());
TestHddsUpgradeUtils.testPreUpgradeConditionsDataNodes(
cluster.getHddsDatanodes());
// Snapshot the pipelines that are OPEN before finalization starts.
Set<PipelineID> preUpgradeOpenPipelines =
scmPipelineManager.getPipelines(RATIS_THREE, OPEN)
.stream()
.map(Pipeline::getId)
.collect(Collectors.toSet());
// Trigger Finalization on the SCM
StatusAndMessages status = scm.getFinalizationManager().finalizeUpgrade(
"xyz");
assertEquals(STARTING_FINALIZATION, status.status());
// Wait for the Finalization to complete on the SCM.
TestHddsUpgradeUtils.waitForFinalizationFromClient(
cluster.getStorageContainerLocationClient(), "xyz");
Set<PipelineID> postUpgradeOpenPipelines =
scmPipelineManager.getPipelines(RATIS_THREE, OPEN)
.stream()
.map(Pipeline::getId)
.collect(Collectors.toSet());
// No pipelines from before the upgrade should still be open after the
// upgrade.
long numPreUpgradeOpenPipelines = preUpgradeOpenPipelines
.stream()
.filter(postUpgradeOpenPipelines::contains)
.count();
assertEquals(0, numPreUpgradeOpenPipelines);
// Verify Post-Upgrade conditions on the SCM.
TestHddsUpgradeUtils.testPostUpgradeConditionsSCM(
cluster.getStorageContainerManagersList(),
NUM_CONTAINERS_CREATED, NUM_DATA_NODES);
// All datanodes on the SCM should have moved to HEALTHY-READONLY state.
TestHddsUpgradeUtils.testDataNodesStateOnSCM(
cluster.getStorageContainerManagersList(), NUM_DATA_NODES,
HEALTHY_READONLY, HEALTHY);
// Verify the SCM has driven all the DataNodes through Layout Upgrade.
// In the happy path case, no containers should have been quasi closed as
// a result of the upgrade.
TestHddsUpgradeUtils.testPostUpgradeConditionsDataNodes(
cluster.getHddsDatanodes(), NUM_CONTAINERS_CREATED, CLOSED);
// Test that we can use a pipeline after upgrade.
// Will fail with exception if there are no pipelines.
try (OzoneClient client = cluster.newClient()) {
ObjectStore store = client.getObjectStore();
store.createVolume("vol1");
store.getVolume("vol1").createBucket("buc1");
store.getVolume("vol1").getBucket("buc1").createKey("key1", 100,
ReplicationType.RATIS, ReplicationFactor.THREE, new HashMap<>());
}
}
/*
 * All the subsequent tests here are failure cases. Some of the tests below
 * could simultaneously fail one or more nodes at specific execution points
 * and in different thread contexts.
 * Upgrade path key execution points are defined in
 * UpgradeFinalizer:UpgradeTestInjectionPoints.
 */
/*
 * Helper function to inject SCM failure and a SCM restart at a given
 * execution point during SCM-Upgrade.
 *
 * Injects Failure in : SCM
 * Executing-Thread-Context : SCM-Upgrade
 *
 * Returns true: the in-flight finalization is aborted by the restart, so a
 * replacement finalization is scheduled on a background thread here.
 */
private Boolean injectSCMFailureDuringSCMUpgrade()
    throws InterruptedException, TimeoutException, AuthenticationException,
    IOException {
  // For some tests this could get called in a different thread context.
  // We need to guard concurrent updates to the cluster.
  synchronized (cluster) {
    cluster.restartStorageContainerManager(true);
    loadSCMState();
  }
  // The ongoing current SCM Upgrade is getting aborted at this point. We
  // need to schedule a new SCM Upgrade on a different thread context.
  // Lambda instead of an anonymous Runnable, matching the style used
  // elsewhere in this class.
  Thread t = new Thread(() -> {
    try {
      loadSCMState();
      scm.getFinalizationManager().finalizeUpgrade("xyz");
    } catch (IOException e) {
      // Log via the test logger (not printStackTrace) for consistency with
      // the other injectors; the failure is asserted on the main thread.
      LOG.error("Re-scheduled SCM upgrade finalization failed!", e);
      testPassed.set(false);
    }
  });
  t.start();
  return true;
}
/*
 * Restarts every DataNode in the cluster at a given execution point during
 * SCM-Upgrade, simulating multi-node failure. Must run in the SCM-Upgrade
 * thread context. Because only datanodes (not the SCM) are failed, the
 * ongoing SCM finalization is not aborted, so no replacement finalization
 * needs to be scheduled — hence the false return value.
 *
 * Injects Failure in : All the DataNodes
 * Executing-Thread-Context : SCM-Upgrade
 */
private Boolean injectDataNodeFailureDuringSCMUpgrade() {
  try {
    // Snapshot the datanode list so restarts cannot race with concurrent
    // changes to the live list.
    List<HddsDatanodeService> snapshot =
        new ArrayList<>(cluster.getHddsDatanodes());
    for (HddsDatanodeService service : snapshot) {
      DatanodeDetails details = service.getDatanodeDetails();
      LOG.info("Restarting datanode {}", details);
      cluster.restartHddsDatanode(details, false);
    }
    cluster.waitForClusterToBeReady();
  } catch (Exception e) {
    LOG.error("DataNode Restarts Failed!", e);
    testPassed.set(false);
  }
  loadSCMState();
  // false: keep the currently ongoing SCM-Upgrade-Finalization running.
  return false;
}
/*
 * Helper function to inject a DataNode failure and restart for a specific
 * DataNode. This injection function can target a specific DataNode and
 * thus facilitates getting called in the upgrade-finalization thread context
 * of that specific DataNode. The returned thread is NOT started; the caller
 * starts it at the desired injection point.
 *
 * Injects Failure in : Given DataNode
 * Executing-Thread-Context : the same DataNode that we are failing here.
 */
private Thread injectDataNodeFailureDuringDataNodeUpgrade(
    DatanodeDetails dn) {
  // Schedule the DataNode restart on a separate thread context, otherwise
  // the restart will hang. Any cluster modification needs to be guarded
  // since it could happen from multiple independent threads.
  // (The previous try/catch + fail() around Thread construction was dead
  // code — the constructor cannot throw — and has been removed.)
  return new Thread(() -> {
    try {
      synchronized (cluster) {
        cluster.restartHddsDatanode(dn, true);
      }
    } catch (Exception e) {
      // Record the failure for the main test thread to assert on.
      LOG.error("DataNode restart failed!", e);
      testPassed.set(false);
    }
  });
}
/*
 * Helper function to inject coordinated failures and restarts across
 * all the DataNodes as well as the SCM. This can help create targeted test
 * cases that inject such comprehensive failures in SCM-Upgrade-Context as
 * well as DataNode-Upgrade-Context. The returned thread is NOT started;
 * the caller starts it at the desired injection point.
 *
 * Injects Failure in : SCM as well as ALL the DataNodes.
 * Executing-Thread-Context : Either the SCM-Upgrade-Finalizer or the
 * DataNode-Upgrade-Finalizer.
 */
private Thread injectSCMAndDataNodeFailureTogetherAtTheSameTime()
    throws InterruptedException, TimeoutException, AuthenticationException,
    IOException {
  // This needs to happen in a separate thread context otherwise
  // DataNode restart will hang. Lambda instead of an anonymous Runnable,
  // matching the style used elsewhere in this class.
  return new Thread(() -> {
    try {
      // Since we are modifying cluster in an independent thread context,
      // we synchronize access to it to avoid concurrent modification
      // exception.
      synchronized (cluster) {
        // Work on a copy of the current set of DataNodes to avoid
        // concurrent-modification surprises while restarting them.
        List<HddsDatanodeService> currentDataNodes =
            new ArrayList<>(cluster.getHddsDatanodes());
        for (HddsDatanodeService ds : currentDataNodes) {
          cluster.restartHddsDatanode(ds.getDatanodeDetails(), false);
        }
        cluster.restartStorageContainerManager(false);
        cluster.waitForClusterToBeReady();
      }
    } catch (Exception e) {
      // Log via the test logger (not printStackTrace) for consistency.
      LOG.error("Combined SCM/DataNode restarts failed!", e);
      testPassed.set(false);
    }
  });
}
/*
 * We have various test cases to target single-node or multi-node failures
 * below.
 **/
/*
 * One node(SCM) failure case:
 * Thread-Context : SCM-Upgrade
 *
 * Test SCM failure During SCM Upgrade before execution point
 * "PreFinalizeUpgrade". All meaningful Upgrade execution points
 * are defined in UpgradeFinalizer:UpgradeTestInjectionPoints.
 */
@Test
public void testScmFailuresBeforeScmPreFinalizeUpgrade()
throws Exception {
// testPassed may be cleared by injection threads; asserted at the end.
testPassed.set(true);
scmFinalizationExecutor.configureTestInjectionFunction(
BEFORE_PRE_FINALIZE_UPGRADE,
this::injectSCMFailureDuringSCMUpgrade);
testFinalizationWithFailureInjectionHelper(null);
assertTrue(testPassed.get());
}
/*
 * One node(SCM) failure case:
 * Thread-Context : SCM-Upgrade
 *
 * Test SCM failure During SCM Upgrade after execution point
 * "PreFinalizeUpgrade". All meaningful Upgrade execution points
 * are defined in UpgradeFinalizer:UpgradeTestInjectionPoints.
 */
@Test
public void testScmFailuresAfterScmPreFinalizeUpgrade()
throws Exception {
// testPassed may be cleared by injection threads; asserted at the end.
testPassed.set(true);
scmFinalizationExecutor.configureTestInjectionFunction(
AFTER_PRE_FINALIZE_UPGRADE,
this::injectSCMFailureDuringSCMUpgrade);
testFinalizationWithFailureInjectionHelper(null);
assertTrue(testPassed.get());
}
/*
 * One node(SCM) failure case:
 * Thread-Context : SCM-Upgrade
 *
 * Test SCM failure During SCM Upgrade after execution point
 * "CompleteFinalization". All meaningful Upgrade execution points
 * are defined in UpgradeFinalizer:UpgradeTestInjectionPoints.
 */
@Test
public void testScmFailuresAfterScmCompleteFinalization()
    throws Exception {
  // testPassed may be cleared by injection threads; asserted at the end.
  testPassed.set(true);
  // Method reference instead of a wrapping lambda, consistent with the
  // sibling SCM-failure tests in this class.
  scmFinalizationExecutor.configureTestInjectionFunction(
      AFTER_COMPLETE_FINALIZATION,
      this::injectSCMFailureDuringSCMUpgrade);
  testFinalizationWithFailureInjectionHelper(null);
  assertTrue(testPassed.get());
}
/*
 * One node(SCM) failure case:
 * Thread-Context : SCM-Upgrade
 *
 * Test SCM failure During SCM Upgrade after execution point
 * "PostFinalizeUpgrade". All meaningful Upgrade execution points
 * are defined in UpgradeFinalizer:UpgradeTestInjectionPoints.
 */
@Test
public void testScmFailuresAfterScmPostFinalizeUpgrade()
    throws Exception {
  // testPassed may be cleared by injection threads; asserted at the end.
  testPassed.set(true);
  // Method reference instead of a wrapping lambda, consistent with the
  // sibling SCM-failure tests in this class.
  scmFinalizationExecutor.configureTestInjectionFunction(
      AFTER_POST_FINALIZE_UPGRADE,
      this::injectSCMFailureDuringSCMUpgrade);
  testFinalizationWithFailureInjectionHelper(null);
  assertTrue(testPassed.get());
}
/*
 * Multi node(all DataNodes) failure case:
 * Thread-Context : SCM-Upgrade
 *
 * Test all DataNode failures During SCM Upgrade before execution point
 * "PreFinalizeUpgrade". All meaningful Upgrade execution points
 * are defined in UpgradeFinalizer:UpgradeTestInjectionPoints.
 */
@Test
public void testAllDataNodeFailuresBeforeScmPreFinalizeUpgrade()
throws Exception {
// testPassed may be cleared by injection threads; asserted at the end.
testPassed.set(true);
scmFinalizationExecutor.configureTestInjectionFunction(
BEFORE_PRE_FINALIZE_UPGRADE,
this::injectDataNodeFailureDuringSCMUpgrade);
testFinalizationWithFailureInjectionHelper(null);
assertTrue(testPassed.get());
}
/*
 * Multi node(all DataNodes) failure case:
 * Thread-Context : SCM-Upgrade
 *
 * Test all DataNode failures During SCM Upgrade after execution point
 * "PreFinalizeUpgrade". All meaningful Upgrade execution points
 * are defined in UpgradeFinalizer:UpgradeTestInjectionPoints.
 * (Comment previously said "before"; the injection point used below is
 * AFTER_PRE_FINALIZE_UPGRADE.)
 */
@Test
public void testAllDataNodeFailuresAfterScmPreFinalizeUpgrade()
throws Exception {
// testPassed may be cleared by injection threads; asserted at the end.
testPassed.set(true);
scmFinalizationExecutor.configureTestInjectionFunction(
AFTER_PRE_FINALIZE_UPGRADE,
this::injectDataNodeFailureDuringSCMUpgrade);
testFinalizationWithFailureInjectionHelper(null);
assertTrue(testPassed.get());
}
/*
 * Multi node(all DataNodes) failure case:
 * Thread-Context : SCM-Upgrade
 *
 * Test all DataNode failures During SCM Upgrade after execution point
 * "CompleteFinalization". All meaningful Upgrade execution points
 * are defined in UpgradeFinalizer:UpgradeTestInjectionPoints.
 */
@Test
public void testAllDataNodeFailuresAfterScmCompleteFinalization()
throws Exception {
// testPassed may be cleared by injection threads; asserted at the end.
testPassed.set(true);
scmFinalizationExecutor.configureTestInjectionFunction(
AFTER_COMPLETE_FINALIZATION,
this::injectDataNodeFailureDuringSCMUpgrade);
testFinalizationWithFailureInjectionHelper(null);
assertTrue(testPassed.get());
}
/*
 * Multi node(all DataNodes) failure case:
 * Thread-Context : SCM-Upgrade
 *
 * Test all DataNode failures During SCM Upgrade after execution point
 * "PostFinalizeUpgrade". All meaningful Upgrade execution points
 * are defined in UpgradeFinalizer:UpgradeTestInjectionPoints.
 */
@Test
public void testAllDataNodeFailuresAfterScmPostFinalizeUpgrade()
throws Exception {
// testPassed may be cleared by injection threads; asserted at the end.
testPassed.set(true);
scmFinalizationExecutor.configureTestInjectionFunction(
AFTER_POST_FINALIZE_UPGRADE,
this::injectDataNodeFailureDuringSCMUpgrade);
testFinalizationWithFailureInjectionHelper(null);
assertTrue(testPassed.get());
}
/*
 * Single node(targeted DataNode) failure case:
 * Thread-Context : DataNode-Upgrade.
 *
 * Fail the same DataNode that is going through Upgrade-processing at a
 * specific code execution point. This test covers all the meaningful
 * Upgrade execution points as defined in
 * UpgradeFinalizer:UpgradeTestInjectionPoints.
 */
@Test
public void testDataNodeFailuresDuringDataNodeUpgrade()
throws Exception {
for (UpgradeTestInjectionPoints injectionPoint:
UpgradeTestInjectionPoints.values()) {
testPassed.set(true);
// Configure a given data node to fail itself when its
// corresponding Upgrade-Finalizer reaches a specific point in its
// execution.
HddsDatanodeService ds = cluster.getHddsDatanodes().get(1);
Thread failureInjectionThread =
injectDataNodeFailureDuringDataNodeUpgrade(ds.getDatanodeDetails());
InjectedUpgradeFinalizationExecutor dataNodeFinalizationExecutor =
new InjectedUpgradeFinalizationExecutor();
dataNodeFinalizationExecutor.configureTestInjectionFunction(
injectionPoint, () -> {
failureInjectionThread.start();
return true;
});
((BasicUpgradeFinalizer)ds.getDatanodeStateMachine()
.getUpgradeFinalizer())
.setFinalizationExecutor(dataNodeFinalizationExecutor);
testFinalizationWithFailureInjectionHelper(failureInjectionThread);
assertTrue(testPassed.get());
// Provision a fresh cluster for the next injection point.
synchronized (cluster) {
shutdown();
init();
}
LOG.info("testDataNodeFailuresDuringDataNodeUpgrade: Failure Injection " +
"Point {} passed.", injectionPoint.name());
}
}
/*
 * Two nodes(SCM and a targeted DataNode) combination failure case:
 * Thread-Contexts :
 *     DataNode failure in its own DataNode-Upgrade-Context .
 *     SCM failure in its own SCM-Upgrade-Context .
 *
 * Fail the same DataNode that is going through its own Upgrade-processing
 * at a specific code execution point. Also fail the SCM when SCM is going
 * through upgrade-finalization. This test covers all the combinations of
 * SCM-Upgrade-execution points and DataNode-Upgrade-execution points.
 */
@Test
public void testAllPossibleDataNodeFailuresAndSCMFailures()
    throws Exception {
  // Configure a given data node to restart itself when its
  // corresponding Upgrade-Finalizer reaches a specific point in its
  // execution.
  for (UpgradeTestInjectionPoints scmInjectionPoint :
      UpgradeTestInjectionPoints.values()) {
    // Method reference instead of a block lambda, consistent with the
    // other SCM failure-injection tests in this class.
    scmFinalizationExecutor.configureTestInjectionFunction(
        scmInjectionPoint,
        this::injectSCMFailureDuringSCMUpgrade);
    for (UpgradeTestInjectionPoints datanodeInjectionPoint :
        UpgradeTestInjectionPoints.values()) {
      HddsDatanodeService ds = cluster.getHddsDatanodes().get(1);
      testPassed.set(true);
      Thread dataNodefailureInjectionThread =
          injectDataNodeFailureDuringDataNodeUpgrade(ds.getDatanodeDetails());
      InjectedUpgradeFinalizationExecutor dataNodeFinalizationExecutor =
          new InjectedUpgradeFinalizationExecutor();
      dataNodeFinalizationExecutor.configureTestInjectionFunction(
          datanodeInjectionPoint, () -> {
            dataNodefailureInjectionThread.start();
            return true;
          });
      ((BasicUpgradeFinalizer) ds.getDatanodeStateMachine()
          .getUpgradeFinalizer())
          .setFinalizationExecutor(dataNodeFinalizationExecutor);
      testFinalizationWithFailureInjectionHelper(
          dataNodefailureInjectionThread);
      assertTrue(testPassed.get());
      // Provision a fresh cluster for the next combination.
      synchronized (cluster) {
        shutdown();
        init();
      }
      LOG.info("testAllPossibleDataNodeFailuresAndSCMFailures: " +
          "DataNode-Failure-Injection-Point={} with " +
          "Scm-FailureInjection-Point={} passed.",
          datanodeInjectionPoint.name(), scmInjectionPoint.name());
    }
  }
}
/*
 * Two nodes(SCM and a targeted DataNode together at the same time)
 * combination failure case:
 * Thread-Contexts :
 *          SCM-Upgrade-Finalizer-Context
 *
 * Fail the DataNode and the SCM together when the SCM is going
 * through upgrade. This test covers all the combinations of
 * SCM-Upgrade-execution points.
 */
@Test
public void testDataNodeAndSCMFailuresTogetherDuringSCMUpgrade()
throws Exception {
for (UpgradeTestInjectionPoints injectionPoint :
UpgradeTestInjectionPoints.values()) {
testPassed.set(true);
Thread helpingFailureInjectionThread =
injectSCMAndDataNodeFailureTogetherAtTheSameTime();
InjectedUpgradeFinalizationExecutor finalizationExecutor =
new InjectedUpgradeFinalizationExecutor();
// Start the combined SCM+DataNode restart when the SCM finalizer
// reaches this injection point.
finalizationExecutor.configureTestInjectionFunction(
injectionPoint, () -> {
helpingFailureInjectionThread.start();
return true;
});
scm.getFinalizationManager().getUpgradeFinalizer()
.setFinalizationExecutor(finalizationExecutor);
testFinalizationWithFailureInjectionHelper(helpingFailureInjectionThread);
assertTrue(testPassed.get());
// Provision a fresh cluster for the next injection point.
synchronized (cluster) {
shutdown();
init();
}
LOG.info("testDataNodeAndSCMFailuresTogetherDuringSCMUpgrade: Failure " +
"Injection Point {} passed.", injectionPoint.name());
}
}
/*
 * Two nodes(SCM and a targeted DataNode together at the same time)
 * combination failure case:
 * Thread-Contexts :
 *          DataNode-Upgrade-Finalizer-Context.
 *
 * Fail the DataNode and the SCM together when the DataNode is going
 * through upgrade. This test covers all the combinations of
 * DataNode-Upgrade-execution points.
 */
@Test
public void testDataNodeAndSCMFailuresTogetherDuringDataNodeUpgrade()
throws Exception {
for (UpgradeTestInjectionPoints injectionPoint :
UpgradeTestInjectionPoints.values()) {
testPassed.set(true);
Thread helpingFailureInjectionThread =
injectSCMAndDataNodeFailureTogetherAtTheSameTime();
HddsDatanodeService ds = cluster.getHddsDatanodes().get(1);
InjectedUpgradeFinalizationExecutor dataNodeFinalizationExecutor =
new InjectedUpgradeFinalizationExecutor();
// Start the combined SCM+DataNode restart when this DataNode's
// finalizer reaches the injection point.
dataNodeFinalizationExecutor.configureTestInjectionFunction(
injectionPoint, () -> {
helpingFailureInjectionThread.start();
return true;
});
((BasicUpgradeFinalizer)ds.getDatanodeStateMachine()
.getUpgradeFinalizer())
.setFinalizationExecutor(dataNodeFinalizationExecutor);
testFinalizationWithFailureInjectionHelper(helpingFailureInjectionThread);
assertTrue(testPassed.get());
// Provision a fresh cluster for the next injection point.
synchronized (cluster) {
shutdown();
init();
}
LOG.info("testDataNodeAndSCMFailuresTogetherDuringDataNodeUpgrade: " +
"Failure Injection Point {} passed.", injectionPoint.name());
}
}
/*
 * Drives a full finalization cycle with an optional failure-injection
 * thread: sets up pipelines/containers/a key, triggers finalization, joins
 * the injection thread, polls until the SCM reports FINALIZATION_DONE or
 * ALREADY_FINALIZED, then verifies post-upgrade conditions on the SCM and
 * all DataNodes.
 *
 * @param failureInjectionThread thread started by an injection function,
 *        or null when the injection runs inline; joined before polling.
 */
public void testFinalizationWithFailureInjectionHelper(
Thread failureInjectionThread) throws Exception {
waitForPipelineCreated();
createTestContainers();
createKey();
// Test the Pre-Upgrade conditions on SCM as well as DataNodes.
TestHddsUpgradeUtils.testPreUpgradeConditionsSCM(
cluster.getStorageContainerManagersList());
TestHddsUpgradeUtils.testPreUpgradeConditionsDataNodes(
cluster.getHddsDatanodes());
// Trigger Finalization on the SCM
StatusAndMessages status =
scm.getFinalizationManager().finalizeUpgrade("xyz");
assertEquals(STARTING_FINALIZATION, status.status());
// Make sure that any outstanding thread created by failure injection
// has completed its job.
if (failureInjectionThread != null) {
failureInjectionThread.join();
}
// Wait for the Finalization to complete on the SCM.
// Failure injection could have restarted the SCM and it could be in
// ALREADY_FINALIZED state as well. Re-issue finalizeUpgrade if a restart
// rolled the state back to FINALIZATION_REQUIRED.
// NOTE(review): this is a tight poll loop with no sleep between progress
// queries — consider adding a short delay between iterations.
while ((status.status() != FINALIZATION_DONE) &&
(status.status() != ALREADY_FINALIZED)) {
loadSCMState();
status = scm.getFinalizationManager().queryUpgradeFinalizationProgress(
"xyz",
true, false);
if (status.status() == FINALIZATION_REQUIRED) {
status = scm.getFinalizationManager().finalizeUpgrade("xyz");
}
}
// Verify Post-Upgrade conditions on the SCM.
// With failure injection
TestHddsUpgradeUtils.testPostUpgradeConditionsSCM(
cluster.getStorageContainerManagersList(), NUM_CONTAINERS_CREATED,
NUM_DATA_NODES);
// All datanodes on the SCM should have moved to HEALTHY-READONLY state.
// Due to timing constraint also allow a "HEALTHY" state.
loadSCMState();
TestHddsUpgradeUtils.testDataNodesStateOnSCM(
cluster.getStorageContainerManagersList(), NUM_DATA_NODES,
HEALTHY_READONLY, HEALTHY);
// Need to wait for post finalization heartbeat from DNs.
LambdaTestUtils.await(600000, 500, () -> {
try {
loadSCMState();
TestHddsUpgradeUtils.testDataNodesStateOnSCM(
cluster.getStorageContainerManagersList(), NUM_DATA_NODES,
HEALTHY, null);
sleep(100);
} catch (Throwable ex) {
LOG.info(ex.getMessage());
return false;
}
return true;
});
// Verify the SCM has driven all the DataNodes through Layout Upgrade.
TestHddsUpgradeUtils.testPostUpgradeConditionsDataNodes(
cluster.getHddsDatanodes(), NUM_CONTAINERS_CREATED);
// Verify that new pipeline can be created with upgraded datanodes.
try {
testPostUpgradePipelineCreation();
} catch (SCMException e) {
// If pipeline creation fails, make sure that there is a valid reason
// for this i.e. all datanodes are already part of some pipeline.
for (HddsDatanodeService dataNode : cluster.getHddsDatanodes()) {
DatanodeStateMachine dsm = dataNode.getDatanodeStateMachine();
Set<PipelineID> pipelines =
scm.getScmNodeManager().getPipelines(dsm.getDatanodeDetails());
assertNotNull(pipelines);
}
}
}
}
|
googleapis/google-cloud-java | 36,085 | java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/CreateGlossaryTermRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataplex/v1/business_glossary.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataplex.v1;
/**
*
*
* <pre>
* Creates a new GlossaryTerm under the specified Glossary.
* </pre>
*
* Protobuf type {@code google.cloud.dataplex.v1.CreateGlossaryTermRequest}
*/
public final class CreateGlossaryTermRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.CreateGlossaryTermRequest)
CreateGlossaryTermRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateGlossaryTermRequest.newBuilder() to construct.
private CreateGlossaryTermRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateGlossaryTermRequest() {
parent_ = "";
termId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateGlossaryTermRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataplex.v1.BusinessGlossaryProto
.internal_static_google_cloud_dataplex_v1_CreateGlossaryTermRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataplex.v1.BusinessGlossaryProto
.internal_static_google_cloud_dataplex_v1_CreateGlossaryTermRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataplex.v1.CreateGlossaryTermRequest.class,
com.google.cloud.dataplex.v1.CreateGlossaryTermRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource where the GlossaryTerm will be created.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}
* where `location_id` refers to a Google Cloud region.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The parent resource where the GlossaryTerm will be created.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}
* where `location_id` refers to a Google Cloud region.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TERM_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object termId_ = "";
/**
*
*
* <pre>
* Required. GlossaryTerm identifier.
* </pre>
*
* <code>string term_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The termId.
*/
@java.lang.Override
public java.lang.String getTermId() {
java.lang.Object ref = termId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
termId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. GlossaryTerm identifier.
* </pre>
*
* <code>string term_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for termId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTermIdBytes() {
java.lang.Object ref = termId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
termId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TERM_FIELD_NUMBER = 3;
private com.google.cloud.dataplex.v1.GlossaryTerm term_;
/**
*
*
* <pre>
* Required. The GlossaryTerm to create.
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryTerm term = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the term field is set.
*/
@java.lang.Override
public boolean hasTerm() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The GlossaryTerm to create.
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryTerm term = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The term.
*/
@java.lang.Override
public com.google.cloud.dataplex.v1.GlossaryTerm getTerm() {
return term_ == null ? com.google.cloud.dataplex.v1.GlossaryTerm.getDefaultInstance() : term_;
}
/**
*
*
* <pre>
* Required. The GlossaryTerm to create.
* </pre>
*
* <code>
* .google.cloud.dataplex.v1.GlossaryTerm term = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.dataplex.v1.GlossaryTermOrBuilder getTermOrBuilder() {
return term_ == null ? com.google.cloud.dataplex.v1.GlossaryTerm.getDefaultInstance() : term_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(termId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, termId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getTerm());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(termId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, termId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getTerm());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dataplex.v1.CreateGlossaryTermRequest)) {
return super.equals(obj);
}
com.google.cloud.dataplex.v1.CreateGlossaryTermRequest other =
(com.google.cloud.dataplex.v1.CreateGlossaryTermRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getTermId().equals(other.getTermId())) return false;
if (hasTerm() != other.hasTerm()) return false;
if (hasTerm()) {
if (!getTerm().equals(other.getTerm())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + TERM_ID_FIELD_NUMBER;
hash = (53 * hash) + getTermId().hashCode();
if (hasTerm()) {
hash = (37 * hash) + TERM_FIELD_NUMBER;
hash = (53 * hash) + getTerm().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // Standard generated parse entry points; all overloads delegate to the
  // shared PARSER (optionally with an extension registry).
  public static com.google.cloud.dataplex.v1.CreateGlossaryTermRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataplex.v1.CreateGlossaryTermRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataplex.v1.CreateGlossaryTermRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataplex.v1.CreateGlossaryTermRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataplex.v1.CreateGlossaryTermRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataplex.v1.CreateGlossaryTermRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  // Stream-based variants wrap IOExceptions via the GeneratedMessageV3 helpers.
  public static com.google.cloud.dataplex.v1.CreateGlossaryTermRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataplex.v1.CreateGlossaryTermRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataplex.v1.CreateGlossaryTermRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataplex.v1.CreateGlossaryTermRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataplex.v1.CreateGlossaryTermRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataplex.v1.CreateGlossaryTermRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dataplex.v1.CreateGlossaryTermRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   *
   *
   * <pre>
   * Creates a new GlossaryTerm under the specified Glossary.
   * </pre>
   *
   * Protobuf type {@code google.cloud.dataplex.v1.CreateGlossaryTermRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.CreateGlossaryTermRequest)
      com.google.cloud.dataplex.v1.CreateGlossaryTermRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataplex.v1.BusinessGlossaryProto
          .internal_static_google_cloud_dataplex_v1_CreateGlossaryTermRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dataplex.v1.BusinessGlossaryProto
          .internal_static_google_cloud_dataplex_v1_CreateGlossaryTermRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataplex.v1.CreateGlossaryTermRequest.class,
              com.google.cloud.dataplex.v1.CreateGlossaryTermRequest.Builder.class);
    }

    // Construct using com.google.cloud.dataplex.v1.CreateGlossaryTermRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates the nested `term` field builder when the runtime is
    // configured to always use field builders.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getTermFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      // Resets all fields and presence bits to their defaults.
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      termId_ = "";
      term_ = null;
      if (termBuilder_ != null) {
        termBuilder_.dispose();
        termBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dataplex.v1.BusinessGlossaryProto
          .internal_static_google_cloud_dataplex_v1_CreateGlossaryTermRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dataplex.v1.CreateGlossaryTermRequest getDefaultInstanceForType() {
      return com.google.cloud.dataplex.v1.CreateGlossaryTermRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dataplex.v1.CreateGlossaryTermRequest build() {
      com.google.cloud.dataplex.v1.CreateGlossaryTermRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dataplex.v1.CreateGlossaryTermRequest buildPartial() {
      com.google.cloud.dataplex.v1.CreateGlossaryTermRequest result =
          new com.google.cloud.dataplex.v1.CreateGlossaryTermRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields into the message. Builder bits: 0x1=parent, 0x2=term_id,
    // 0x4=term; only the `term` bit is translated into the message's bitField0_.
    private void buildPartial0(com.google.cloud.dataplex.v1.CreateGlossaryTermRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.termId_ = termId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.term_ = termBuilder_ == null ? term_ : termBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dataplex.v1.CreateGlossaryTermRequest) {
        return mergeFrom((com.google.cloud.dataplex.v1.CreateGlossaryTermRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.dataplex.v1.CreateGlossaryTermRequest other) {
      if (other == com.google.cloud.dataplex.v1.CreateGlossaryTermRequest.getDefaultInstance())
        return this;
      // proto3 merge semantics: non-empty scalars overwrite, message fields merge.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getTermId().isEmpty()) {
        termId_ = other.termId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasTerm()) {
        mergeTerm(other.getTerm());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Tags: 10 = field 1 (parent), 18 = field 2 (term_id),
          // 26 = field 3 (term); 0 terminates the stream.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                termId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getTermFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. The parent resource where the GlossaryTerm will be created.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}
     * where `location_id` refers to a Google Cloud region.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      // Lazily converts a ByteString to String and caches the result.
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The parent resource where the GlossaryTerm will be created.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}
     * where `location_id` refers to a Google Cloud region.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The parent resource where the GlossaryTerm will be created.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}
     * where `location_id` refers to a Google Cloud region.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The parent resource where the GlossaryTerm will be created.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}
     * where `location_id` refers to a Google Cloud region.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The parent resource where the GlossaryTerm will be created.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}
     * where `location_id` refers to a Google Cloud region.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object termId_ = "";

    /**
     *
     *
     * <pre>
     * Required. GlossaryTerm identifier.
     * </pre>
     *
     * <code>string term_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The termId.
     */
    public java.lang.String getTermId() {
      java.lang.Object ref = termId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        termId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. GlossaryTerm identifier.
     * </pre>
     *
     * <code>string term_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for termId.
     */
    public com.google.protobuf.ByteString getTermIdBytes() {
      java.lang.Object ref = termId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        termId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. GlossaryTerm identifier.
     * </pre>
     *
     * <code>string term_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The termId to set.
     * @return This builder for chaining.
     */
    public Builder setTermId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      termId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. GlossaryTerm identifier.
     * </pre>
     *
     * <code>string term_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTermId() {
      termId_ = getDefaultInstance().getTermId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. GlossaryTerm identifier.
     * </pre>
     *
     * <code>string term_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for termId to set.
     * @return This builder for chaining.
     */
    public Builder setTermIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      termId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private com.google.cloud.dataplex.v1.GlossaryTerm term_;

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataplex.v1.GlossaryTerm,
            com.google.cloud.dataplex.v1.GlossaryTerm.Builder,
            com.google.cloud.dataplex.v1.GlossaryTermOrBuilder>
        termBuilder_;

    /**
     *
     *
     * <pre>
     * Required. The GlossaryTerm to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.GlossaryTerm term = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the term field is set.
     */
    public boolean hasTerm() {
      return ((bitField0_ & 0x00000004) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. The GlossaryTerm to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.GlossaryTerm term = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The term.
     */
    public com.google.cloud.dataplex.v1.GlossaryTerm getTerm() {
      // Reads through the field builder when one exists; otherwise falls back
      // to the raw field (or the default instance when unset).
      if (termBuilder_ == null) {
        return term_ == null
            ? com.google.cloud.dataplex.v1.GlossaryTerm.getDefaultInstance()
            : term_;
      } else {
        return termBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The GlossaryTerm to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.GlossaryTerm term = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setTerm(com.google.cloud.dataplex.v1.GlossaryTerm value) {
      if (termBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        term_ = value;
      } else {
        termBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The GlossaryTerm to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.GlossaryTerm term = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setTerm(com.google.cloud.dataplex.v1.GlossaryTerm.Builder builderForValue) {
      if (termBuilder_ == null) {
        term_ = builderForValue.build();
      } else {
        termBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The GlossaryTerm to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.GlossaryTerm term = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeTerm(com.google.cloud.dataplex.v1.GlossaryTerm value) {
      if (termBuilder_ == null) {
        // Merge into the existing value only when one is already set and
        // non-default; otherwise replace outright.
        if (((bitField0_ & 0x00000004) != 0)
            && term_ != null
            && term_ != com.google.cloud.dataplex.v1.GlossaryTerm.getDefaultInstance()) {
          getTermBuilder().mergeFrom(value);
        } else {
          term_ = value;
        }
      } else {
        termBuilder_.mergeFrom(value);
      }
      if (term_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The GlossaryTerm to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.GlossaryTerm term = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearTerm() {
      bitField0_ = (bitField0_ & ~0x00000004);
      term_ = null;
      if (termBuilder_ != null) {
        termBuilder_.dispose();
        termBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The GlossaryTerm to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.GlossaryTerm term = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.dataplex.v1.GlossaryTerm.Builder getTermBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getTermFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. The GlossaryTerm to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.GlossaryTerm term = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.dataplex.v1.GlossaryTermOrBuilder getTermOrBuilder() {
      if (termBuilder_ != null) {
        return termBuilder_.getMessageOrBuilder();
      } else {
        return term_ == null
            ? com.google.cloud.dataplex.v1.GlossaryTerm.getDefaultInstance()
            : term_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The GlossaryTerm to create.
     * </pre>
     *
     * <code>
     * .google.cloud.dataplex.v1.GlossaryTerm term = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataplex.v1.GlossaryTerm,
            com.google.cloud.dataplex.v1.GlossaryTerm.Builder,
            com.google.cloud.dataplex.v1.GlossaryTermOrBuilder>
        getTermFieldBuilder() {
      // Lazily creates the field builder; after creation the builder owns the
      // value, so the raw field is nulled out.
      if (termBuilder_ == null) {
        termBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataplex.v1.GlossaryTerm,
                com.google.cloud.dataplex.v1.GlossaryTerm.Builder,
                com.google.cloud.dataplex.v1.GlossaryTermOrBuilder>(
                getTerm(), getParentForChildren(), isClean());
        term_ = null;
      }
      return termBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.CreateGlossaryTermRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.CreateGlossaryTermRequest)
  // Singleton default (all-fields-unset) instance, shared by all callers.
  private static final com.google.cloud.dataplex.v1.CreateGlossaryTermRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.CreateGlossaryTermRequest();
  }

  public static com.google.cloud.dataplex.v1.CreateGlossaryTermRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser implementation: parses via a fresh Builder, attaching the partially
  // built message to any parse exception for error reporting.
  private static final com.google.protobuf.Parser<CreateGlossaryTermRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateGlossaryTermRequest>() {
        @java.lang.Override
        public CreateGlossaryTermRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateGlossaryTermRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateGlossaryTermRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dataplex.v1.CreateGlossaryTermRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/deploy/v1/cloud_deploy.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.deploy.v1;
/**
*
*
* <pre>
* RepairPhase tracks the repair attempts that have been made for
* each `RepairPhaseConfig` specified in the `Automation` resource.
* </pre>
*
* Protobuf type {@code google.cloud.deploy.v1.RepairPhase}
*/
public final class RepairPhase extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.deploy.v1.RepairPhase)
RepairPhaseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use RepairPhase.newBuilder() to construct.
  private RepairPhase(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Constructs the (empty) default instance; all real construction goes
  // through the Builder.
  private RepairPhase() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new RepairPhase();
  }
  // Descriptor plumbing generated from google/cloud/deploy/v1/cloud_deploy.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.deploy.v1.CloudDeployProto
        .internal_static_google_cloud_deploy_v1_RepairPhase_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.deploy.v1.CloudDeployProto
        .internal_static_google_cloud_deploy_v1_RepairPhase_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.deploy.v1.RepairPhase.class,
            com.google.cloud.deploy.v1.RepairPhase.Builder.class);
  }
  // Discriminator for the `repair_phase` oneof: the set field's number, or 0 when unset.
  private int repairPhaseCase_ = 0;

  // Oneof payload: a RetryPhase (case 1) or RollbackAttempt (case 2), cast on access.
  @SuppressWarnings("serial")
  private java.lang.Object repairPhase_;

  public enum RepairPhaseCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    RETRY(1),
    ROLLBACK(2),
    REPAIRPHASE_NOT_SET(0);
    private final int value;

    private RepairPhaseCase(int value) {
      this.value = value;
    }

    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static RepairPhaseCase valueOf(int value) {
      return forNumber(value);
    }

    // Maps a field number to its oneof case; returns null for unknown numbers.
    public static RepairPhaseCase forNumber(int value) {
      switch (value) {
        case 1:
          return RETRY;
        case 2:
          return ROLLBACK;
        case 0:
          return REPAIRPHASE_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  public RepairPhaseCase getRepairPhaseCase() {
    return RepairPhaseCase.forNumber(repairPhaseCase_);
  }
  public static final int RETRY_FIELD_NUMBER = 1;

  /**
   *
   *
   * <pre>
   * Output only. Records of the retry attempts for retry repair mode.
   * </pre>
   *
   * <code>
   * .google.cloud.deploy.v1.RetryPhase retry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return Whether the retry field is set.
   */
  @java.lang.Override
  public boolean hasRetry() {
    // Present iff the oneof discriminator selects field 1.
    return repairPhaseCase_ == 1;
  }

  /**
   *
   *
   * <pre>
   * Output only. Records of the retry attempts for retry repair mode.
   * </pre>
   *
   * <code>
   * .google.cloud.deploy.v1.RetryPhase retry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return The retry.
   */
  @java.lang.Override
  public com.google.cloud.deploy.v1.RetryPhase getRetry() {
    // Returns the default instance when a different oneof case is active.
    if (repairPhaseCase_ == 1) {
      return (com.google.cloud.deploy.v1.RetryPhase) repairPhase_;
    }
    return com.google.cloud.deploy.v1.RetryPhase.getDefaultInstance();
  }

  /**
   *
   *
   * <pre>
   * Output only. Records of the retry attempts for retry repair mode.
   * </pre>
   *
   * <code>
   * .google.cloud.deploy.v1.RetryPhase retry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.deploy.v1.RetryPhaseOrBuilder getRetryOrBuilder() {
    if (repairPhaseCase_ == 1) {
      return (com.google.cloud.deploy.v1.RetryPhase) repairPhase_;
    }
    return com.google.cloud.deploy.v1.RetryPhase.getDefaultInstance();
  }
  public static final int ROLLBACK_FIELD_NUMBER = 2;

  /**
   *
   *
   * <pre>
   * Output only. Rollback attempt for rollback repair mode .
   * </pre>
   *
   * <code>
   * .google.cloud.deploy.v1.RollbackAttempt rollback = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return Whether the rollback field is set.
   */
  @java.lang.Override
  public boolean hasRollback() {
    // Present iff the oneof discriminator selects field 2.
    return repairPhaseCase_ == 2;
  }

  /**
   *
   *
   * <pre>
   * Output only. Rollback attempt for rollback repair mode .
   * </pre>
   *
   * <code>
   * .google.cloud.deploy.v1.RollbackAttempt rollback = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return The rollback.
   */
  @java.lang.Override
  public com.google.cloud.deploy.v1.RollbackAttempt getRollback() {
    // Returns the default instance when a different oneof case is active.
    if (repairPhaseCase_ == 2) {
      return (com.google.cloud.deploy.v1.RollbackAttempt) repairPhase_;
    }
    return com.google.cloud.deploy.v1.RollbackAttempt.getDefaultInstance();
  }

  /**
   *
   *
   * <pre>
   * Output only. Rollback attempt for rollback repair mode .
   * </pre>
   *
   * <code>
   * .google.cloud.deploy.v1.RollbackAttempt rollback = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.deploy.v1.RollbackAttemptOrBuilder getRollbackOrBuilder() {
    if (repairPhaseCase_ == 2) {
      return (com.google.cloud.deploy.v1.RollbackAttempt) repairPhase_;
    }
    return com.google.cloud.deploy.v1.RollbackAttempt.getDefaultInstance();
  }
  // Memoized initialization state: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Exactly one oneof branch (or none) is serialized, per the discriminator.
    if (repairPhaseCase_ == 1) {
      output.writeMessage(1, (com.google.cloud.deploy.v1.RetryPhase) repairPhase_);
    }
    if (repairPhaseCase_ == 2) {
      output.writeMessage(2, (com.google.cloud.deploy.v1.RollbackAttempt) repairPhase_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 marks "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (repairPhaseCase_ == 1) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, (com.google.cloud.deploy.v1.RetryPhase) repairPhase_);
    }
    if (repairPhaseCase_ == 2) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, (com.google.cloud.deploy.v1.RollbackAttempt) repairPhase_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.deploy.v1.RepairPhase)) {
      return super.equals(obj);
    }
    com.google.cloud.deploy.v1.RepairPhase other = (com.google.cloud.deploy.v1.RepairPhase) obj;
    // Both sides must have the same oneof case, then the active branch's
    // value is compared.
    if (!getRepairPhaseCase().equals(other.getRepairPhaseCase())) return false;
    switch (repairPhaseCase_) {
      case 1:
        if (!getRetry().equals(other.getRetry())) return false;
        break;
      case 2:
        if (!getRollback().equals(other.getRollback())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 marks "not yet computed". Only the active oneof
    // branch contributes to the hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (repairPhaseCase_) {
      case 1:
        hash = (37 * hash) + RETRY_FIELD_NUMBER;
        hash = (53 * hash) + getRetry().hashCode();
        break;
      case 2:
        hash = (37 * hash) + ROLLBACK_FIELD_NUMBER;
        hash = (53 * hash) + getRollback().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all overloads delegate to the
  // shared PARSER (optionally with an extension registry).
  public static com.google.cloud.deploy.v1.RepairPhase parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.deploy.v1.RepairPhase parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.deploy.v1.RepairPhase parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.deploy.v1.RepairPhase parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.deploy.v1.RepairPhase parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.deploy.v1.RepairPhase parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  // Stream-based variants wrap IOExceptions via the GeneratedMessageV3 helpers.
  public static com.google.cloud.deploy.v1.RepairPhase parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.deploy.v1.RepairPhase parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.deploy.v1.RepairPhase parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.deploy.v1.RepairPhase parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.deploy.v1.RepairPhase parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.deploy.v1.RepairPhase parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // --- Builder factory methods generated by protoc. ---
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Fresh builder derived from the default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated with the given prototype's fields.
  public static Builder newBuilder(com.google.cloud.deploy.v1.RepairPhase prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoids a needless mergeFrom when this IS the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * RepairPhase tracks the repair attempts that have been made for
   * each `RepairPhaseConfig` specified in the `Automation` resource.
   * </pre>
   *
   * Protobuf type {@code google.cloud.deploy.v1.RepairPhase}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.deploy.v1.RepairPhase)
      com.google.cloud.deploy.v1.RepairPhaseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.deploy.v1.CloudDeployProto
          .internal_static_google_cloud_deploy_v1_RepairPhase_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.deploy.v1.CloudDeployProto
          .internal_static_google_cloud_deploy_v1_RepairPhase_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.deploy.v1.RepairPhase.class,
              com.google.cloud.deploy.v1.RepairPhase.Builder.class);
    }
    // Construct using com.google.cloud.deploy.v1.RepairPhase.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all fields, including the repair_phase oneof, back to the unset state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (retryBuilder_ != null) {
        retryBuilder_.clear();
      }
      if (rollbackBuilder_ != null) {
        rollbackBuilder_.clear();
      }
      repairPhaseCase_ = 0;
      repairPhase_ = null;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.deploy.v1.CloudDeployProto
          .internal_static_google_cloud_deploy_v1_RepairPhase_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.deploy.v1.RepairPhase getDefaultInstanceForType() {
      return com.google.cloud.deploy.v1.RepairPhase.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.deploy.v1.RepairPhase build() {
      com.google.cloud.deploy.v1.RepairPhase result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.deploy.v1.RepairPhase buildPartial() {
      com.google.cloud.deploy.v1.RepairPhase result =
          new com.google.cloud.deploy.v1.RepairPhase(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }
    // No singular non-oneof fields on this message; protoc still emits the hook.
    private void buildPartial0(com.google.cloud.deploy.v1.RepairPhase result) {
      int from_bitField0_ = bitField0_;
    }
    // Copies the active oneof case into the built message, preferring the live
    // sub-builder's state over the cached repairPhase_ object when one exists.
    private void buildPartialOneofs(com.google.cloud.deploy.v1.RepairPhase result) {
      result.repairPhaseCase_ = repairPhaseCase_;
      result.repairPhase_ = this.repairPhase_;
      if (repairPhaseCase_ == 1 && retryBuilder_ != null) {
        result.repairPhase_ = retryBuilder_.build();
      }
      if (repairPhaseCase_ == 2 && rollbackBuilder_ != null) {
        result.repairPhase_ = rollbackBuilder_.build();
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic merge: dispatches to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.deploy.v1.RepairPhase) {
        return mergeFrom((com.google.cloud.deploy.v1.RepairPhase) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: only the currently-set oneof case of `other` is merged in.
    public Builder mergeFrom(com.google.cloud.deploy.v1.RepairPhase other) {
      if (other == com.google.cloud.deploy.v1.RepairPhase.getDefaultInstance()) return this;
      switch (other.getRepairPhaseCase()) {
        case RETRY:
          {
            mergeRetry(other.getRetry());
            break;
          }
        case ROLLBACK:
          {
            mergeRollback(other.getRollback());
            break;
          }
        case REPAIRPHASE_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge: tag 10 = retry (field 1), tag 18 = rollback (field 2);
    // unknown tags are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getRetryFieldBuilder().getBuilder(), extensionRegistry);
                repairPhaseCase_ = 1;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getRollbackFieldBuilder().getBuilder(), extensionRegistry);
                repairPhaseCase_ = 2;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Oneof bookkeeping: 0 = not set, 1 = retry, 2 = rollback.
    private int repairPhaseCase_ = 0;
    private java.lang.Object repairPhase_;
    public RepairPhaseCase getRepairPhaseCase() {
      return RepairPhaseCase.forNumber(repairPhaseCase_);
    }
    public Builder clearRepairPhase() {
      repairPhaseCase_ = 0;
      repairPhase_ = null;
      onChanged();
      return this;
    }
    private int bitField0_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.deploy.v1.RetryPhase,
            com.google.cloud.deploy.v1.RetryPhase.Builder,
            com.google.cloud.deploy.v1.RetryPhaseOrBuilder>
        retryBuilder_;
    /**
     *
     *
     * <pre>
     * Output only. Records of the retry attempts for retry repair mode.
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RetryPhase retry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return Whether the retry field is set.
     */
    @java.lang.Override
    public boolean hasRetry() {
      return repairPhaseCase_ == 1;
    }
    /**
     *
     *
     * <pre>
     * Output only. Records of the retry attempts for retry repair mode.
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RetryPhase retry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The retry.
     */
    @java.lang.Override
    public com.google.cloud.deploy.v1.RetryPhase getRetry() {
      if (retryBuilder_ == null) {
        if (repairPhaseCase_ == 1) {
          return (com.google.cloud.deploy.v1.RetryPhase) repairPhase_;
        }
        return com.google.cloud.deploy.v1.RetryPhase.getDefaultInstance();
      } else {
        if (repairPhaseCase_ == 1) {
          return retryBuilder_.getMessage();
        }
        return com.google.cloud.deploy.v1.RetryPhase.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Records of the retry attempts for retry repair mode.
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RetryPhase retry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setRetry(com.google.cloud.deploy.v1.RetryPhase value) {
      if (retryBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        repairPhase_ = value;
        onChanged();
      } else {
        retryBuilder_.setMessage(value);
      }
      repairPhaseCase_ = 1;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Records of the retry attempts for retry repair mode.
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RetryPhase retry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setRetry(com.google.cloud.deploy.v1.RetryPhase.Builder builderForValue) {
      if (retryBuilder_ == null) {
        repairPhase_ = builderForValue.build();
        onChanged();
      } else {
        retryBuilder_.setMessage(builderForValue.build());
      }
      repairPhaseCase_ = 1;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Records of the retry attempts for retry repair mode.
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RetryPhase retry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder mergeRetry(com.google.cloud.deploy.v1.RetryPhase value) {
      if (retryBuilder_ == null) {
        // Merge with the existing value only if this case is already set to a
        // non-default message; otherwise replace outright.
        if (repairPhaseCase_ == 1
            && repairPhase_ != com.google.cloud.deploy.v1.RetryPhase.getDefaultInstance()) {
          repairPhase_ =
              com.google.cloud.deploy.v1.RetryPhase.newBuilder(
                      (com.google.cloud.deploy.v1.RetryPhase) repairPhase_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          repairPhase_ = value;
        }
        onChanged();
      } else {
        if (repairPhaseCase_ == 1) {
          retryBuilder_.mergeFrom(value);
        } else {
          retryBuilder_.setMessage(value);
        }
      }
      repairPhaseCase_ = 1;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Records of the retry attempts for retry repair mode.
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RetryPhase retry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder clearRetry() {
      if (retryBuilder_ == null) {
        if (repairPhaseCase_ == 1) {
          repairPhaseCase_ = 0;
          repairPhase_ = null;
          onChanged();
        }
      } else {
        if (repairPhaseCase_ == 1) {
          repairPhaseCase_ = 0;
          repairPhase_ = null;
        }
        retryBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Records of the retry attempts for retry repair mode.
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RetryPhase retry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.deploy.v1.RetryPhase.Builder getRetryBuilder() {
      return getRetryFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Output only. Records of the retry attempts for retry repair mode.
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RetryPhase retry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.deploy.v1.RetryPhaseOrBuilder getRetryOrBuilder() {
      if ((repairPhaseCase_ == 1) && (retryBuilder_ != null)) {
        return retryBuilder_.getMessageOrBuilder();
      } else {
        if (repairPhaseCase_ == 1) {
          return (com.google.cloud.deploy.v1.RetryPhase) repairPhase_;
        }
        return com.google.cloud.deploy.v1.RetryPhase.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Records of the retry attempts for retry repair mode.
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RetryPhase retry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    // Lazily creates the sub-builder, seeding it from repairPhase_ and
    // switching this oneof's active case to retry (1) as a side effect.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.deploy.v1.RetryPhase,
            com.google.cloud.deploy.v1.RetryPhase.Builder,
            com.google.cloud.deploy.v1.RetryPhaseOrBuilder>
        getRetryFieldBuilder() {
      if (retryBuilder_ == null) {
        if (!(repairPhaseCase_ == 1)) {
          repairPhase_ = com.google.cloud.deploy.v1.RetryPhase.getDefaultInstance();
        }
        retryBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.deploy.v1.RetryPhase,
                com.google.cloud.deploy.v1.RetryPhase.Builder,
                com.google.cloud.deploy.v1.RetryPhaseOrBuilder>(
                (com.google.cloud.deploy.v1.RetryPhase) repairPhase_,
                getParentForChildren(),
                isClean());
        repairPhase_ = null;
      }
      repairPhaseCase_ = 1;
      onChanged();
      return retryBuilder_;
    }
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.deploy.v1.RollbackAttempt,
            com.google.cloud.deploy.v1.RollbackAttempt.Builder,
            com.google.cloud.deploy.v1.RollbackAttemptOrBuilder>
        rollbackBuilder_;
    /**
     *
     *
     * <pre>
     * Output only. Rollback attempt for rollback repair mode .
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RollbackAttempt rollback = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return Whether the rollback field is set.
     */
    @java.lang.Override
    public boolean hasRollback() {
      return repairPhaseCase_ == 2;
    }
    /**
     *
     *
     * <pre>
     * Output only. Rollback attempt for rollback repair mode .
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RollbackAttempt rollback = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The rollback.
     */
    @java.lang.Override
    public com.google.cloud.deploy.v1.RollbackAttempt getRollback() {
      if (rollbackBuilder_ == null) {
        if (repairPhaseCase_ == 2) {
          return (com.google.cloud.deploy.v1.RollbackAttempt) repairPhase_;
        }
        return com.google.cloud.deploy.v1.RollbackAttempt.getDefaultInstance();
      } else {
        if (repairPhaseCase_ == 2) {
          return rollbackBuilder_.getMessage();
        }
        return com.google.cloud.deploy.v1.RollbackAttempt.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Rollback attempt for rollback repair mode .
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RollbackAttempt rollback = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setRollback(com.google.cloud.deploy.v1.RollbackAttempt value) {
      if (rollbackBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        repairPhase_ = value;
        onChanged();
      } else {
        rollbackBuilder_.setMessage(value);
      }
      repairPhaseCase_ = 2;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Rollback attempt for rollback repair mode .
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RollbackAttempt rollback = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setRollback(com.google.cloud.deploy.v1.RollbackAttempt.Builder builderForValue) {
      if (rollbackBuilder_ == null) {
        repairPhase_ = builderForValue.build();
        onChanged();
      } else {
        rollbackBuilder_.setMessage(builderForValue.build());
      }
      repairPhaseCase_ = 2;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Rollback attempt for rollback repair mode .
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RollbackAttempt rollback = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder mergeRollback(com.google.cloud.deploy.v1.RollbackAttempt value) {
      if (rollbackBuilder_ == null) {
        // Same merge-or-replace pattern as mergeRetry, for case 2.
        if (repairPhaseCase_ == 2
            && repairPhase_ != com.google.cloud.deploy.v1.RollbackAttempt.getDefaultInstance()) {
          repairPhase_ =
              com.google.cloud.deploy.v1.RollbackAttempt.newBuilder(
                      (com.google.cloud.deploy.v1.RollbackAttempt) repairPhase_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          repairPhase_ = value;
        }
        onChanged();
      } else {
        if (repairPhaseCase_ == 2) {
          rollbackBuilder_.mergeFrom(value);
        } else {
          rollbackBuilder_.setMessage(value);
        }
      }
      repairPhaseCase_ = 2;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Rollback attempt for rollback repair mode .
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RollbackAttempt rollback = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder clearRollback() {
      if (rollbackBuilder_ == null) {
        if (repairPhaseCase_ == 2) {
          repairPhaseCase_ = 0;
          repairPhase_ = null;
          onChanged();
        }
      } else {
        if (repairPhaseCase_ == 2) {
          repairPhaseCase_ = 0;
          repairPhase_ = null;
        }
        rollbackBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. Rollback attempt for rollback repair mode .
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RollbackAttempt rollback = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.deploy.v1.RollbackAttempt.Builder getRollbackBuilder() {
      return getRollbackFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Output only. Rollback attempt for rollback repair mode .
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RollbackAttempt rollback = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.deploy.v1.RollbackAttemptOrBuilder getRollbackOrBuilder() {
      if ((repairPhaseCase_ == 2) && (rollbackBuilder_ != null)) {
        return rollbackBuilder_.getMessageOrBuilder();
      } else {
        if (repairPhaseCase_ == 2) {
          return (com.google.cloud.deploy.v1.RollbackAttempt) repairPhase_;
        }
        return com.google.cloud.deploy.v1.RollbackAttempt.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Rollback attempt for rollback repair mode .
     * </pre>
     *
     * <code>
     * .google.cloud.deploy.v1.RollbackAttempt rollback = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    // Lazily creates the sub-builder and switches the active case to rollback (2).
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.deploy.v1.RollbackAttempt,
            com.google.cloud.deploy.v1.RollbackAttempt.Builder,
            com.google.cloud.deploy.v1.RollbackAttemptOrBuilder>
        getRollbackFieldBuilder() {
      if (rollbackBuilder_ == null) {
        if (!(repairPhaseCase_ == 2)) {
          repairPhase_ = com.google.cloud.deploy.v1.RollbackAttempt.getDefaultInstance();
        }
        rollbackBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.deploy.v1.RollbackAttempt,
                com.google.cloud.deploy.v1.RollbackAttempt.Builder,
                com.google.cloud.deploy.v1.RollbackAttemptOrBuilder>(
                (com.google.cloud.deploy.v1.RollbackAttempt) repairPhase_,
                getParentForChildren(),
                isClean());
        repairPhase_ = null;
      }
      repairPhaseCase_ = 2;
      onChanged();
      return rollbackBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.deploy.v1.RepairPhase)
  }
// @@protoc_insertion_point(class_scope:google.cloud.deploy.v1.RepairPhase)
  // Shared immutable default instance; created eagerly at class-load time.
  private static final com.google.cloud.deploy.v1.RepairPhase DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.deploy.v1.RepairPhase();
  }
  public static com.google.cloud.deploy.v1.RepairPhase getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Singleton parser backing all the static parseFrom overloads. Parse errors
  // carry the partially-built message via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<RepairPhase> PARSER =
      new com.google.protobuf.AbstractParser<RepairPhase>() {
        @java.lang.Override
        public RepairPhase parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so the checked signature stays uniform.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<RepairPhase> parser() {
    return PARSER;
  }
  // Instance-level accessors required by the Message interface.
  @java.lang.Override
  public com.google.protobuf.Parser<RepairPhase> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.deploy.v1.RepairPhase getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== File boundary (extraction artifact): repo googleapis/google-cloud-java ====
// java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/DiscoveryOtherCloudGenerationCadence.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2/dlp.proto
// Protobuf Java Version: 3.25.8
package com.google.privacy.dlp.v2;
/**
*
*
* <pre>
* How often existing resources should have their profiles refreshed.
* New resources are scanned as quickly as possible depending on system
* capacity.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence}
*/
public final class DiscoveryOtherCloudGenerationCadence
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence)
DiscoveryOtherCloudGenerationCadenceOrBuilder {
private static final long serialVersionUID = 0L;
// Use DiscoveryOtherCloudGenerationCadence.newBuilder() to construct.
  // Builder-based constructor used by Builder.build()/buildPartial().
  private DiscoveryOtherCloudGenerationCadence(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor producing default field values (enum number 0).
  private DiscoveryOtherCloudGenerationCadence() {
    refreshFrequency_ = 0;
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DiscoveryOtherCloudGenerationCadence();
  }
  // Descriptor/reflection plumbing generated from google/privacy/dlp/v2/dlp.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_DiscoveryOtherCloudGenerationCadence_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_DiscoveryOtherCloudGenerationCadence_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence.class,
            com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence.Builder.class);
  }
  // Presence bits for optional message fields (bit 0 = inspect_template_modified_cadence).
  private int bitField0_;
  public static final int REFRESH_FREQUENCY_FIELD_NUMBER = 1;
  // Stored as the raw wire enum number so unrecognized values survive round-trips.
  private int refreshFrequency_ = 0;
  /**
   *
   *
   * <pre>
   * Optional. Frequency to update profiles regardless of whether the underlying
   * resource has changes. Defaults to never.
   * </pre>
   *
   * <code>
   * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The enum numeric value on the wire for refreshFrequency.
   */
  @java.lang.Override
  public int getRefreshFrequencyValue() {
    return refreshFrequency_;
  }
  /**
   *
   *
   * <pre>
   * Optional. Frequency to update profiles regardless of whether the underlying
   * resource has changes. Defaults to never.
   * </pre>
   *
   * <code>
   * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The refreshFrequency.
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.DataProfileUpdateFrequency getRefreshFrequency() {
    // forNumber returns null for wire values unknown to this generated code;
    // map those to UNRECOGNIZED rather than returning null.
    com.google.privacy.dlp.v2.DataProfileUpdateFrequency result =
        com.google.privacy.dlp.v2.DataProfileUpdateFrequency.forNumber(refreshFrequency_);
    return result == null
        ? com.google.privacy.dlp.v2.DataProfileUpdateFrequency.UNRECOGNIZED
        : result;
  }
  public static final int INSPECT_TEMPLATE_MODIFIED_CADENCE_FIELD_NUMBER = 2;
  // Nullable backing field; presence is tracked via bitField0_ bit 0.
  private com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence
      inspectTemplateModifiedCadence_;
  /**
   *
   *
   * <pre>
   * Optional. Governs when to update data profiles when the inspection rules
   * defined by the `InspectTemplate` change.
   * If not set, changing the template will not cause a data profile to update.
   * </pre>
   *
   * <code>
   * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the inspectTemplateModifiedCadence field is set.
   */
  @java.lang.Override
  public boolean hasInspectTemplateModifiedCadence() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. Governs when to update data profiles when the inspection rules
   * defined by the `InspectTemplate` change.
   * If not set, changing the template will not cause a data profile to update.
   * </pre>
   *
   * <code>
   * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The inspectTemplateModifiedCadence.
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence
      getInspectTemplateModifiedCadence() {
    // Never returns null: falls back to the default instance when unset.
    return inspectTemplateModifiedCadence_ == null
        ? com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.getDefaultInstance()
        : inspectTemplateModifiedCadence_;
  }
  /**
   *
   *
   * <pre>
   * Optional. Governs when to update data profiles when the inspection rules
   * defined by the `InspectTemplate` change.
   * If not set, changing the template will not cause a data profile to update.
   * </pre>
   *
   * <code>
   * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadenceOrBuilder
      getInspectTemplateModifiedCadenceOrBuilder() {
    return inspectTemplateModifiedCadence_ == null
        ? com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.getDefaultInstance()
        : inspectTemplateModifiedCadence_;
  }
  // Memoized result: -1 = not computed, 0 = false, 1 = true. This proto3
  // message has no required fields, so initialization always succeeds.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serialization: field 1 is written only when it differs from the proto3
  // default enum value; field 2 only when its presence bit is set.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (refreshFrequency_
        != com.google.privacy.dlp.v2.DataProfileUpdateFrequency.UPDATE_FREQUENCY_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(1, refreshFrequency_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getInspectTemplateModifiedCadence());
    }
    getUnknownFields().writeTo(output);
  }
  // Mirrors writeTo: sums the encoded size of each field that would be written,
  // caching the total in memoizedSize (-1 means not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (refreshFrequency_
        != com.google.privacy.dlp.v2.DataProfileUpdateFrequency.UPDATE_FREQUENCY_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, refreshFrequency_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, getInspectTemplateModifiedCadence());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over both fields plus unknown fields; hashCode below mixes
  // the same components, keeping the equals/hashCode contract.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence)) {
      return super.equals(obj);
    }
    com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence other =
        (com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence) obj;
    if (refreshFrequency_ != other.refreshFrequency_) return false;
    if (hasInspectTemplateModifiedCadence() != other.hasInspectTemplateModifiedCadence())
      return false;
    if (hasInspectTemplateModifiedCadence()) {
      if (!getInspectTemplateModifiedCadence().equals(other.getInspectTemplateModifiedCadence()))
        return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 doubles as the "not computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + REFRESH_FREQUENCY_FIELD_NUMBER;
    hash = (53 * hash) + refreshFrequency_;
    if (hasInspectTemplateModifiedCadence()) {
      hash = (37 * hash) + INSPECT_TEMPLATE_MODIFIED_CADENCE_FIELD_NUMBER;
      hash = (53 * hash) + getInspectTemplateModifiedCadence().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Static parse entry points generated by protoc; all delegate to PARSER. ---
  public static com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream-based overloads surface I/O failures as IOException via parseWithIOException.
  public static com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a length prefix first (for streams holding multiple messages).
  public static com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // --- Builder factory methods generated by protoc. ---
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Fresh builder derived from the default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated with the given prototype's fields.
  public static Builder newBuilder(
      com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoids a needless mergeFrom when this IS the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * How often existing resources should have their profiles refreshed.
   * New resources are scanned as quickly as possible depending on system
   * capacity.
   * </pre>
   *
   * Protobuf type {@code google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence}
   */
  // NOTE(review): protoc-generated builder — do not hand-edit; regenerate from the .proto.
  // Field presence is tracked in bitField0_: 0x1 = refresh_frequency set,
  // 0x2 = inspect_template_modified_cadence set.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence)
      com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadenceOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_DiscoveryOtherCloudGenerationCadence_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_DiscoveryOtherCloudGenerationCadence_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence.class,
              com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence.Builder.class);
    }
    // Construct using com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getInspectTemplateModifiedCadenceFieldBuilder();
      }
    }
    // Resets every field to its default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      refreshFrequency_ = 0;
      inspectTemplateModifiedCadence_ = null;
      if (inspectTemplateModifiedCadenceBuilder_ != null) {
        inspectTemplateModifiedCadenceBuilder_.dispose();
        inspectTemplateModifiedCadenceBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_DiscoveryOtherCloudGenerationCadence_descriptor;
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence
        getDefaultInstanceForType() {
      return com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence build() {
      com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence buildPartial() {
      com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence result =
          new com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only fields whose builder presence bit is set; the built message's
    // bit 0x1 records presence of inspect_template_modified_cadence.
    private void buildPartial0(
        com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.refreshFrequency_ = refreshFrequency_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.inspectTemplateModifiedCadence_ =
            inspectTemplateModifiedCadenceBuilder_ == null
                ? inspectTemplateModifiedCadence_
                : inspectTemplateModifiedCadenceBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence) {
        return mergeFrom((com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges set fields from another message of the same type; default-valued
    // fields in `other` are left untouched here.
    public Builder mergeFrom(com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence other) {
      if (other
          == com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence.getDefaultInstance())
        return this;
      if (other.refreshFrequency_ != 0) {
        setRefreshFrequencyValue(other.getRefreshFrequencyValue());
      }
      if (other.hasInspectTemplateModifiedCadence()) {
        mergeInspectTemplateModifiedCadence(other.getInspectTemplateModifiedCadence());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming parse: tag 8 = refresh_frequency (enum), tag 18 =
    // inspect_template_modified_cadence (sub-message); unknown tags are preserved.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                refreshFrequency_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 18:
              {
                input.readMessage(
                    getInspectTemplateModifiedCadenceFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private int refreshFrequency_ = 0;
    /**
     *
     *
     * <pre>
     * Optional. Frequency to update profiles regardless of whether the underlying
     * resource has changes. Defaults to never.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The enum numeric value on the wire for refreshFrequency.
     */
    @java.lang.Override
    public int getRefreshFrequencyValue() {
      return refreshFrequency_;
    }
    /**
     *
     *
     * <pre>
     * Optional. Frequency to update profiles regardless of whether the underlying
     * resource has changes. Defaults to never.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The enum numeric value on the wire for refreshFrequency to set.
     * @return This builder for chaining.
     */
    public Builder setRefreshFrequencyValue(int value) {
      refreshFrequency_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Frequency to update profiles regardless of whether the underlying
     * resource has changes. Defaults to never.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The refreshFrequency.
     */
    @java.lang.Override
    public com.google.privacy.dlp.v2.DataProfileUpdateFrequency getRefreshFrequency() {
      com.google.privacy.dlp.v2.DataProfileUpdateFrequency result =
          com.google.privacy.dlp.v2.DataProfileUpdateFrequency.forNumber(refreshFrequency_);
      return result == null
          ? com.google.privacy.dlp.v2.DataProfileUpdateFrequency.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Optional. Frequency to update profiles regardless of whether the underlying
     * resource has changes. Defaults to never.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The refreshFrequency to set.
     * @return This builder for chaining.
     */
    public Builder setRefreshFrequency(com.google.privacy.dlp.v2.DataProfileUpdateFrequency value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      refreshFrequency_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Frequency to update profiles regardless of whether the underlying
     * resource has changes. Defaults to never.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRefreshFrequency() {
      bitField0_ = (bitField0_ & ~0x00000001);
      refreshFrequency_ = 0;
      onChanged();
      return this;
    }
    private com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence
        inspectTemplateModifiedCadence_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence,
            com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.Builder,
            com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadenceOrBuilder>
        inspectTemplateModifiedCadenceBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the inspectTemplateModifiedCadence field is set.
     */
    public boolean hasInspectTemplateModifiedCadence() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The inspectTemplateModifiedCadence.
     */
    public com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence
        getInspectTemplateModifiedCadence() {
      if (inspectTemplateModifiedCadenceBuilder_ == null) {
        return inspectTemplateModifiedCadence_ == null
            ? com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.getDefaultInstance()
            : inspectTemplateModifiedCadence_;
      } else {
        return inspectTemplateModifiedCadenceBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setInspectTemplateModifiedCadence(
        com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence value) {
      if (inspectTemplateModifiedCadenceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        inspectTemplateModifiedCadence_ = value;
      } else {
        inspectTemplateModifiedCadenceBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setInspectTemplateModifiedCadence(
        com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.Builder builderForValue) {
      if (inspectTemplateModifiedCadenceBuilder_ == null) {
        inspectTemplateModifiedCadence_ = builderForValue.build();
      } else {
        inspectTemplateModifiedCadenceBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeInspectTemplateModifiedCadence(
        com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence value) {
      if (inspectTemplateModifiedCadenceBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && inspectTemplateModifiedCadence_ != null
            && inspectTemplateModifiedCadence_
                != com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence
                    .getDefaultInstance()) {
          getInspectTemplateModifiedCadenceBuilder().mergeFrom(value);
        } else {
          inspectTemplateModifiedCadence_ = value;
        }
      } else {
        inspectTemplateModifiedCadenceBuilder_.mergeFrom(value);
      }
      if (inspectTemplateModifiedCadence_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearInspectTemplateModifiedCadence() {
      bitField0_ = (bitField0_ & ~0x00000002);
      inspectTemplateModifiedCadence_ = null;
      if (inspectTemplateModifiedCadenceBuilder_ != null) {
        inspectTemplateModifiedCadenceBuilder_.dispose();
        inspectTemplateModifiedCadenceBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.Builder
        getInspectTemplateModifiedCadenceBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getInspectTemplateModifiedCadenceFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadenceOrBuilder
        getInspectTemplateModifiedCadenceOrBuilder() {
      if (inspectTemplateModifiedCadenceBuilder_ != null) {
        return inspectTemplateModifiedCadenceBuilder_.getMessageOrBuilder();
      } else {
        return inspectTemplateModifiedCadence_ == null
            ? com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.getDefaultInstance()
            : inspectTemplateModifiedCadence_;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    // Lazily creates the single-field builder; once created, the builder owns
    // the field value and the plain field is cleared to null.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence,
            com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.Builder,
            com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadenceOrBuilder>
        getInspectTemplateModifiedCadenceFieldBuilder() {
      if (inspectTemplateModifiedCadenceBuilder_ == null) {
        inspectTemplateModifiedCadenceBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence,
                com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.Builder,
                com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadenceOrBuilder>(
                getInspectTemplateModifiedCadence(), getParentForChildren(), isClean());
        inspectTemplateModifiedCadence_ = null;
      }
      return inspectTemplateModifiedCadenceBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence)
  }
// @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence)
  // NOTE(review): protoc-generated singletons — do not hand-edit.
  // Shared immutable default (empty) instance, created once at class load.
  private static final com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence();
  }
  public static com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire parser: delegates to the Builder's streaming mergeFrom and attaches the
  // partially-built message to any parse exception for diagnostics.
  private static final com.google.protobuf.Parser<DiscoveryOtherCloudGenerationCadence> PARSER =
      new com.google.protobuf.AbstractParser<DiscoveryOtherCloudGenerationCadence>() {
        @java.lang.Override
        public DiscoveryOtherCloudGenerationCadence parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<DiscoveryOtherCloudGenerationCadence> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<DiscoveryOtherCloudGenerationCadence> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.privacy.dlp.v2.DiscoveryOtherCloudGenerationCadence
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// File boundary — next file: java-memcache/proto-google-cloud-memcache-v1beta2/src/main/java/com/google/cloud/memcache/v1beta2/WeeklyMaintenanceWindow.java (repo: googleapis/google-cloud-java)
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/memcache/v1beta2/cloud_memcache.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.memcache.v1beta2;
/**
*
*
* <pre>
* Time window specified for weekly operations.
* </pre>
*
* Protobuf type {@code google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow}
*/
public final class WeeklyMaintenanceWindow extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow)
WeeklyMaintenanceWindowOrBuilder {
  // NOTE(review): protoc-generated code — do not hand-edit; regenerate from the .proto.
  private static final long serialVersionUID = 0L;
  // Use WeeklyMaintenanceWindow.newBuilder() to construct.
  private WeeklyMaintenanceWindow(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for DEFAULT_INSTANCE; fields stay at defaults.
  private WeeklyMaintenanceWindow() {
    day_ = 0;
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new WeeklyMaintenanceWindow();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.memcache.v1beta2.CloudMemcacheProto
        .internal_static_google_cloud_memcache_v1beta2_WeeklyMaintenanceWindow_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.memcache.v1beta2.CloudMemcacheProto
        .internal_static_google_cloud_memcache_v1beta2_WeeklyMaintenanceWindow_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow.class,
            com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow.Builder.class);
  }
  // Presence bits: 0x1 = start_time set, 0x2 = duration set (per has* methods below).
  private int bitField0_;
  public static final int DAY_FIELD_NUMBER = 1;
  // Stored as the raw wire enum number; 0 is DAY_OF_WEEK_UNSPECIFIED.
  private int day_ = 0;
  /**
   *
   *
   * <pre>
   * Required. Allows to define schedule that runs specified day of the week.
   * </pre>
   *
   * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The enum numeric value on the wire for day.
   */
  @java.lang.Override
  public int getDayValue() {
    return day_;
  }
  /**
   *
   *
   * <pre>
   * Required. Allows to define schedule that runs specified day of the week.
   * </pre>
   *
   * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The day.
   */
  @java.lang.Override
  public com.google.type.DayOfWeek getDay() {
    com.google.type.DayOfWeek result = com.google.type.DayOfWeek.forNumber(day_);
    return result == null ? com.google.type.DayOfWeek.UNRECOGNIZED : result;
  }
  public static final int START_TIME_FIELD_NUMBER = 2;
  // Null until set; accessors substitute the default instance, presence via bit 0x1.
  private com.google.type.TimeOfDay startTime_;
  /**
   *
   *
   * <pre>
   * Required. Start time of the window in UTC.
   * </pre>
   *
   * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return Whether the startTime field is set.
   */
  @java.lang.Override
  public boolean hasStartTime() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Start time of the window in UTC.
   * </pre>
   *
   * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The startTime.
   */
  @java.lang.Override
  public com.google.type.TimeOfDay getStartTime() {
    return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_;
  }
  /**
   *
   *
   * <pre>
   * Required. Start time of the window in UTC.
   * </pre>
   *
   * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.type.TimeOfDayOrBuilder getStartTimeOrBuilder() {
    return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_;
  }
  public static final int DURATION_FIELD_NUMBER = 3;
  // Null until set; accessors substitute the default instance, presence via bit 0x2.
  private com.google.protobuf.Duration duration_;
  /**
   *
   *
   * <pre>
   * Required. Duration of the time window.
   * </pre>
   *
   * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return Whether the duration field is set.
   */
  @java.lang.Override
  public boolean hasDuration() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Duration of the time window.
   * </pre>
   *
   * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The duration.
   */
  @java.lang.Override
  public com.google.protobuf.Duration getDuration() {
    return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_;
  }
  /**
   *
   *
   * <pre>
   * Required. Duration of the time window.
   * </pre>
   *
   * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.protobuf.DurationOrBuilder getDurationOrBuilder() {
    return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_;
  }
  // Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // proto3 message with no required wire-level fields: always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order; default-valued day is omitted.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (day_ != com.google.type.DayOfWeek.DAY_OF_WEEK_UNSPECIFIED.getNumber()) {
      output.writeEnum(1, day_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getStartTime());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(3, getDuration());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the byte size writeTo would emit.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (day_ != com.google.type.DayOfWeek.DAY_OF_WEEK_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, day_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getStartTime());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getDuration());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including field presence and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow)) {
      return super.equals(obj);
    }
    com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow other =
        (com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow) obj;
    if (day_ != other.day_) return false;
    if (hasStartTime() != other.hasStartTime()) return false;
    if (hasStartTime()) {
      if (!getStartTime().equals(other.getStartTime())) return false;
    }
    if (hasDuration() != other.hasDuration()) return false;
    if (hasDuration()) {
      if (!getDuration().equals(other.getDuration())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals(): mixes descriptor, set fields, unknowns.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + DAY_FIELD_NUMBER;
    hash = (53 * hash) + day_;
    if (hasStartTime()) {
      hash = (37 * hash) + START_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getStartTime().hashCode();
    }
    if (hasDuration()) {
      hash = (37 * hash) + DURATION_FIELD_NUMBER;
      hash = (53 * hash) + getDuration().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf parse entry points; all delegate to PARSER.
  public static com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Fresh builder seeded from the default (empty) instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated with the prototype's field values.
  public static Builder newBuilder(
      com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Time window specified for weekly operations.
* </pre>
*
* Protobuf type {@code google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow)
com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindowOrBuilder {
    // NOTE(review): protoc-generated Builder members — do not hand-edit.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.memcache.v1beta2.CloudMemcacheProto
          .internal_static_google_cloud_memcache_v1beta2_WeeklyMaintenanceWindow_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.memcache.v1beta2.CloudMemcacheProto
          .internal_static_google_cloud_memcache_v1beta2_WeeklyMaintenanceWindow_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow.class,
              com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow.Builder.class);
    }
    // Construct using com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates nested-message field builders when the runtime requires it.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getStartTimeFieldBuilder();
        getDurationFieldBuilder();
      }
    }
    // Resets all fields to their defaults and clears presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      day_ = 0;
      startTime_ = null;
      if (startTimeBuilder_ != null) {
        startTimeBuilder_.dispose();
        startTimeBuilder_ = null;
      }
      duration_ = null;
      if (durationBuilder_ != null) {
        durationBuilder_.dispose();
        durationBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.memcache.v1beta2.CloudMemcacheProto
          .internal_static_google_cloud_memcache_v1beta2_WeeklyMaintenanceWindow_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow getDefaultInstanceForType() {
      return com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow.getDefaultInstance();
    }
    // Builds and verifies initialization; throws if the result is incomplete.
    @java.lang.Override
    public com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow build() {
      com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
@java.lang.Override
public com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow buildPartial() {
com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow result =
new com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.day_ = day_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.startTime_ = startTimeBuilder_ == null ? startTime_ : startTimeBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.duration_ = durationBuilder_ == null ? duration_ : durationBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow) {
return mergeFrom((com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow other) {
if (other == com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow.getDefaultInstance())
return this;
if (other.day_ != 0) {
setDayValue(other.getDayValue());
}
if (other.hasStartTime()) {
mergeStartTime(other.getStartTime());
}
if (other.hasDuration()) {
mergeDuration(other.getDuration());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
day_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18:
{
input.readMessage(getStartTimeFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getDurationFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int day_ = 0;
/**
*
*
* <pre>
* Required. Allows to define schedule that runs specified day of the week.
* </pre>
*
* <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The enum numeric value on the wire for day.
*/
@java.lang.Override
public int getDayValue() {
return day_;
}
/**
*
*
* <pre>
* Required. Allows to define schedule that runs specified day of the week.
* </pre>
*
* <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The enum numeric value on the wire for day to set.
* @return This builder for chaining.
*/
public Builder setDayValue(int value) {
day_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Allows to define schedule that runs specified day of the week.
* </pre>
*
* <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The day.
*/
@java.lang.Override
public com.google.type.DayOfWeek getDay() {
com.google.type.DayOfWeek result = com.google.type.DayOfWeek.forNumber(day_);
return result == null ? com.google.type.DayOfWeek.UNRECOGNIZED : result;
}
/**
*
*
* <pre>
* Required. Allows to define schedule that runs specified day of the week.
* </pre>
*
* <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The day to set.
* @return This builder for chaining.
*/
public Builder setDay(com.google.type.DayOfWeek value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
day_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Allows to define schedule that runs specified day of the week.
* </pre>
*
* <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearDay() {
bitField0_ = (bitField0_ & ~0x00000001);
day_ = 0;
onChanged();
return this;
}
private com.google.type.TimeOfDay startTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.type.TimeOfDay,
com.google.type.TimeOfDay.Builder,
com.google.type.TimeOfDayOrBuilder>
startTimeBuilder_;
/**
*
*
* <pre>
* Required. Start time of the window in UTC.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return Whether the startTime field is set.
*/
public boolean hasStartTime() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The startTime.
*/
public com.google.type.TimeOfDay getStartTime() {
if (startTimeBuilder_ == null) {
return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_;
} else {
return startTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setStartTime(com.google.type.TimeOfDay value) {
if (startTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
startTime_ = value;
} else {
startTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder setStartTime(com.google.type.TimeOfDay.Builder builderForValue) {
if (startTimeBuilder_ == null) {
startTime_ = builderForValue.build();
} else {
startTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder mergeStartTime(com.google.type.TimeOfDay value) {
if (startTimeBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& startTime_ != null
&& startTime_ != com.google.type.TimeOfDay.getDefaultInstance()) {
getStartTimeBuilder().mergeFrom(value);
} else {
startTime_ = value;
}
} else {
startTimeBuilder_.mergeFrom(value);
}
if (startTime_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public Builder clearStartTime() {
bitField0_ = (bitField0_ & ~0x00000002);
startTime_ = null;
if (startTimeBuilder_ != null) {
startTimeBuilder_.dispose();
startTimeBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.type.TimeOfDay.Builder getStartTimeBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getStartTimeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.type.TimeOfDayOrBuilder getStartTimeOrBuilder() {
if (startTimeBuilder_ != null) {
return startTimeBuilder_.getMessageOrBuilder();
} else {
return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_;
}
}
/**
*
*
* <pre>
* Required. Start time of the window in UTC.
* </pre>
*
* <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.type.TimeOfDay,
com.google.type.TimeOfDay.Builder,
com.google.type.TimeOfDayOrBuilder>
getStartTimeFieldBuilder() {
if (startTimeBuilder_ == null) {
startTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.type.TimeOfDay,
com.google.type.TimeOfDay.Builder,
com.google.type.TimeOfDayOrBuilder>(
getStartTime(), getParentForChildren(), isClean());
startTime_ = null;
}
return startTimeBuilder_;
}
private com.google.protobuf.Duration duration_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
durationBuilder_;
/**
*
*
* <pre>
* Required. Duration of the time window.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the duration field is set.
*/
public boolean hasDuration() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. Duration of the time window.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The duration.
*/
public com.google.protobuf.Duration getDuration() {
if (durationBuilder_ == null) {
return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_;
} else {
return durationBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Duration of the time window.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDuration(com.google.protobuf.Duration value) {
if (durationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
duration_ = value;
} else {
durationBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Duration of the time window.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDuration(com.google.protobuf.Duration.Builder builderForValue) {
if (durationBuilder_ == null) {
duration_ = builderForValue.build();
} else {
durationBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Duration of the time window.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeDuration(com.google.protobuf.Duration value) {
if (durationBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& duration_ != null
&& duration_ != com.google.protobuf.Duration.getDefaultInstance()) {
getDurationBuilder().mergeFrom(value);
} else {
duration_ = value;
}
} else {
durationBuilder_.mergeFrom(value);
}
if (duration_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Duration of the time window.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearDuration() {
bitField0_ = (bitField0_ & ~0x00000004);
duration_ = null;
if (durationBuilder_ != null) {
durationBuilder_.dispose();
durationBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Duration of the time window.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.Duration.Builder getDurationBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getDurationFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Duration of the time window.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.DurationOrBuilder getDurationOrBuilder() {
if (durationBuilder_ != null) {
return durationBuilder_.getMessageOrBuilder();
} else {
return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_;
}
}
/**
*
*
* <pre>
* Required. Duration of the time window.
* </pre>
*
* <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
getDurationFieldBuilder() {
if (durationBuilder_ == null) {
durationBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>(
getDuration(), getParentForChildren(), isClean());
duration_ = null;
}
return durationBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow)
}
// @@protoc_insertion_point(class_scope:google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow)
  // Singleton default instance (all fields unset); created eagerly at class load.
  private static final com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow();
  }
  /** Returns the shared immutable default instance of this message. */
  public static com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: delegates to Builder.mergeFrom and normalizes all
  // failure modes (uninitialized message, IOException) into
  // InvalidProtocolBufferException carrying the partially built message.
  private static final com.google.protobuf.Parser<WeeklyMaintenanceWindow> PARSER =
      new com.google.protobuf.AbstractParser<WeeklyMaintenanceWindow>() {
        @java.lang.Override
        public WeeklyMaintenanceWindow parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Static accessor for the message parser. */
  public static com.google.protobuf.Parser<WeeklyMaintenanceWindow> parser() {
    return PARSER;
  }
  // Instance-level accessor for the same shared parser.
  @java.lang.Override
  public com.google.protobuf.Parser<WeeklyMaintenanceWindow> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor for the shared default instance.
  @java.lang.Override
  public com.google.cloud.memcache.v1beta2.WeeklyMaintenanceWindow getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
google/zetasql | 36,449 | javatests/com/google/zetasql/FunctionArgumentTypeTest.java | /*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.google.zetasql;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static org.junit.Assert.assertThrows;
import com.google.zetasql.FunctionArgumentType.FunctionArgumentTypeOptions;
import com.google.zetasql.FunctionProtos.FunctionArgumentTypeOptionsProto;
import com.google.zetasql.FunctionProtos.FunctionArgumentTypeProto;
import com.google.zetasql.ZetaSQLFunctions.FunctionEnums;
import com.google.zetasql.ZetaSQLFunctions.FunctionEnums.ArgumentCardinality;
import com.google.zetasql.ZetaSQLFunctions.FunctionEnums.NamedArgumentKind;
import com.google.zetasql.ZetaSQLFunctions.FunctionEnums.ProcedureArgumentMode;
import com.google.zetasql.ZetaSQLFunctions.SignatureArgumentKind;
import com.google.zetasql.ZetaSQLType.TypeKind;
import com.google.zetasql.ZetaSQLType.TypeProto;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class FunctionArgumentTypeTest {
  @Test
  public void testFixedType() {
    // Fixed type with no explicit cardinality/occurrences: defaults to
    // REQUIRED and is not concrete (occurrence count is -1).
    FunctionArgumentType fixedTypeInt32 =
        new FunctionArgumentType(TypeFactory.createSimpleType(TypeKind.TYPE_INT32));
    assertThat(fixedTypeInt32.isConcrete()).isFalse();
    assertThat(fixedTypeInt32.getType()).isNotNull();
    assertThat(fixedTypeInt32.isRepeated()).isFalse();
    assertThat(fixedTypeInt32.isOptional()).isFalse();
    assertThat(fixedTypeInt32.isRequired()).isTrue();
    assertThat(fixedTypeInt32.getNumOccurrences()).isEqualTo(-1);
    assertThat(fixedTypeInt32.debugString()).isEqualTo("INT32");

    // A non-negative occurrence count makes the argument concrete, even zero.
    FunctionArgumentType concreteFixedType =
        new FunctionArgumentType(
            TypeFactory.createSimpleType(TypeKind.TYPE_INT32), ArgumentCardinality.REQUIRED, 0);
    assertThat(concreteFixedType.isConcrete()).isTrue();
    assertThat(concreteFixedType.getType()).isNotNull();
    assertThat(concreteFixedType.isRepeated()).isFalse();
    assertThat(concreteFixedType.isOptional()).isFalse();
    assertThat(concreteFixedType.isRequired()).isTrue();
    assertThat(concreteFixedType.getNumOccurrences()).isEqualTo(0);
    assertThat(concreteFixedType.debugString()).isEqualTo("INT32");

    // REPEATED cardinality is reflected in the debug string prefix.
    FunctionArgumentType repeatedFixedType =
        new FunctionArgumentType(
            TypeFactory.createSimpleType(TypeKind.TYPE_INT32), ArgumentCardinality.REPEATED, 1);
    assertThat(repeatedFixedType.isConcrete()).isTrue();
    assertThat(repeatedFixedType.getType()).isNotNull();
    assertThat(repeatedFixedType.isRepeated()).isTrue();
    assertThat(repeatedFixedType.isOptional()).isFalse();
    assertThat(repeatedFixedType.isRequired()).isFalse();
    assertThat(repeatedFixedType.getNumOccurrences()).isEqualTo(1);
    assertThat(repeatedFixedType.debugString()).isEqualTo("repeated(1) INT32");

    // OPTIONAL cardinality, likewise.
    FunctionArgumentType optionalFixedType =
        new FunctionArgumentType(
            TypeFactory.createSimpleType(TypeKind.TYPE_INT32), ArgumentCardinality.OPTIONAL, 1);
    assertThat(optionalFixedType.isConcrete()).isTrue();
    assertThat(optionalFixedType.getType()).isNotNull();
    assertThat(optionalFixedType.isRepeated()).isFalse();
    assertThat(optionalFixedType.isOptional()).isTrue();
    assertThat(optionalFixedType.isRequired()).isFalse();
    assertThat(optionalFixedType.getNumOccurrences()).isEqualTo(1);
    assertThat(optionalFixedType.debugString()).isEqualTo("optional(1) INT32");

    // A null Type is tolerated; debug string falls back to "FIXED".
    FunctionArgumentType typeNull =
        new FunctionArgumentType((Type) null, ArgumentCardinality.REPEATED, -1);
    assertThat(typeNull.isConcrete()).isFalse();
    assertThat(typeNull.getType()).isNull();
    assertThat(typeNull.isRepeated()).isTrue();
    assertThat(typeNull.isOptional()).isFalse();
    assertThat(typeNull.isRequired()).isFalse();
    assertThat(typeNull.getNumOccurrences()).isEqualTo(-1);
    assertThat(typeNull.debugString()).isEqualTo("repeated FIXED");
  }
  // TODO: Break this test into multiple smaller unit tests.
  @Test
  public void testNotFixedType() {
    // Templated (non-fixed) argument kinds: never concrete regardless of the
    // occurrence count passed, never carry a resolved Type, and each kind has
    // a distinctive debug string.
    FunctionArgumentType arrayTypeAny1 =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_ARRAY_TYPE_ANY_1, ArgumentCardinality.REPEATED, 0);
    assertThat(arrayTypeAny1.isConcrete()).isFalse();
    assertThat(arrayTypeAny1.getType()).isNull();
    assertThat(arrayTypeAny1.isRepeated()).isTrue();
    assertThat(arrayTypeAny1.debugString()).isEqualTo("repeated <array<T1>>");

    FunctionArgumentType arrayTypeAny2 =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_ARRAY_TYPE_ANY_2, ArgumentCardinality.REQUIRED, 1);
    assertThat(arrayTypeAny2.isConcrete()).isFalse();
    assertThat(arrayTypeAny2.getType()).isNull();
    assertThat(arrayTypeAny2.isRepeated()).isFalse();
    assertThat(arrayTypeAny2.debugString()).isEqualTo("<array<T2>>");

    FunctionArgumentType enumAny =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_ENUM_ANY, ArgumentCardinality.OPTIONAL, -1);
    assertThat(enumAny.isConcrete()).isFalse();
    assertThat(enumAny.getType()).isNull();
    assertThat(enumAny.isRepeated()).isFalse();
    assertThat(enumAny.debugString()).isEqualTo("optional <enum>");

    FunctionArgumentType protoAny =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_PROTO_ANY, ArgumentCardinality.OPTIONAL, 3);
    assertThat(protoAny.isConcrete()).isFalse();
    assertThat(protoAny.getType()).isNull();
    assertThat(protoAny.isRepeated()).isFalse();
    assertThat(protoAny.debugString()).isEqualTo("optional <proto>");

    FunctionArgumentType structAny =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_STRUCT_ANY, ArgumentCardinality.OPTIONAL, 0);
    assertThat(structAny.isConcrete()).isFalse();
    assertThat(structAny.getType()).isNull();
    assertThat(structAny.isRepeated()).isFalse();
    assertThat(structAny.debugString()).isEqualTo("optional <struct>");

    // Scalar templated kinds T1..T5 and their array counterparts.
    FunctionArgumentType typeAny1 =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_TYPE_ANY_1, ArgumentCardinality.REQUIRED, 0);
    assertThat(typeAny1.isConcrete()).isFalse();
    assertThat(typeAny1.getType()).isNull();
    assertThat(typeAny1.isRepeated()).isFalse();
    assertThat(typeAny1.debugString()).isEqualTo("<T1>");

    FunctionArgumentType typeAny2 =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_TYPE_ANY_2, ArgumentCardinality.REQUIRED, 2);
    assertThat(typeAny2.isConcrete()).isFalse();
    assertThat(typeAny2.getType()).isNull();
    assertThat(typeAny2.isRepeated()).isFalse();
    assertThat(typeAny2.debugString()).isEqualTo("<T2>");

    FunctionArgumentType typeAny3 =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_TYPE_ANY_3, ArgumentCardinality.REQUIRED, 2);
    assertThat(typeAny3.isConcrete()).isFalse();
    assertThat(typeAny3.getType()).isNull();
    assertThat(typeAny3.isRepeated()).isFalse();
    assertThat(typeAny3.debugString()).isEqualTo("<T3>");

    FunctionArgumentType arrayTypeAny3 =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_ARRAY_TYPE_ANY_3, ArgumentCardinality.REQUIRED, 2);
    assertThat(arrayTypeAny3.isConcrete()).isFalse();
    assertThat(arrayTypeAny3.getType()).isNull();
    assertThat(arrayTypeAny3.isRepeated()).isFalse();
    assertThat(arrayTypeAny3.debugString()).isEqualTo("<array<T3>>");

    FunctionArgumentType typeAny4 =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_TYPE_ANY_4, ArgumentCardinality.REQUIRED, 2);
    assertThat(typeAny4.isConcrete()).isFalse();
    assertThat(typeAny4.getType()).isNull();
    assertThat(typeAny4.isRepeated()).isFalse();
    assertThat(typeAny4.debugString()).isEqualTo("<T4>");

    FunctionArgumentType arrayTypeAny4 =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_ARRAY_TYPE_ANY_4, ArgumentCardinality.REQUIRED, 2);
    assertThat(arrayTypeAny4.isConcrete()).isFalse();
    assertThat(arrayTypeAny4.getType()).isNull();
    assertThat(arrayTypeAny4.isRepeated()).isFalse();
    assertThat(arrayTypeAny4.debugString()).isEqualTo("<array<T4>>");

    FunctionArgumentType typeAny5 =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_TYPE_ANY_5, ArgumentCardinality.REQUIRED, 2);
    assertThat(typeAny5.isConcrete()).isFalse();
    assertThat(typeAny5.getType()).isNull();
    assertThat(typeAny5.isRepeated()).isFalse();
    assertThat(typeAny5.debugString()).isEqualTo("<T5>");

    FunctionArgumentType arrayTypeAny5 =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_ARRAY_TYPE_ANY_5, ArgumentCardinality.REQUIRED, 2);
    assertThat(arrayTypeAny5.isConcrete()).isFalse();
    assertThat(arrayTypeAny5.getType()).isNull();
    assertThat(arrayTypeAny5.isRepeated()).isFalse();
    assertThat(arrayTypeAny5.debugString()).isEqualTo("<array<T5>>");

    FunctionArgumentType typeArbitrary =
        new FunctionArgumentType(
            SignatureArgumentKind.ARG_TYPE_ARBITRARY, ArgumentCardinality.REQUIRED, -1);
    assertThat(typeArbitrary.isConcrete()).isFalse();
    assertThat(typeArbitrary.getType()).isNull();
    assertThat(typeArbitrary.isRepeated()).isFalse();
    assertThat(typeArbitrary.debugString()).isEqualTo("ANY TYPE");

    // The enum's sentinel value maps to the UNKNOWN_ARG_KIND debug string.
    FunctionArgumentType typeUnknown =
        new FunctionArgumentType(
            SignatureArgumentKind.__SignatureArgumentKind__switch_must_have_a_default__,
            ArgumentCardinality.REPEATED,
            1);
    assertThat(typeUnknown.isConcrete()).isFalse();
    assertThat(typeUnknown.getType()).isNull();
    assertThat(typeUnknown.isRepeated()).isTrue();
    assertThat(typeUnknown.debugString()).isEqualTo("repeated UNKNOWN_ARG_KIND");
  }
@Test
public void testNotFixedGraphTypes() {
FunctionArgumentType graphNode =
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_GRAPH_NODE, ArgumentCardinality.REPEATED, 0);
assertThat(graphNode.isConcrete()).isFalse();
assertThat(graphNode.getType()).isNull();
assertThat(graphNode.isRepeated()).isTrue();
assertThat(graphNode.debugString()).isEqualTo("repeated <graph_node>");
FunctionArgumentType graphEdge =
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_GRAPH_EDGE, ArgumentCardinality.REPEATED, 0);
assertThat(graphEdge.isConcrete()).isFalse();
assertThat(graphEdge.getType()).isNull();
assertThat(graphEdge.isRepeated()).isTrue();
assertThat(graphEdge.debugString()).isEqualTo("repeated <graph_edge>");
FunctionArgumentType graphElement =
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_GRAPH_ELEMENT, ArgumentCardinality.REPEATED, 0);
assertThat(graphElement.isConcrete()).isFalse();
assertThat(graphElement.getType()).isNull();
assertThat(graphElement.isRepeated()).isTrue();
assertThat(graphElement.debugString()).isEqualTo("repeated <graph_element>");
FunctionArgumentType graphPath =
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_GRAPH_PATH, ArgumentCardinality.REPEATED, 0);
assertThat(graphPath.isConcrete()).isFalse();
assertThat(graphPath.getType()).isNull();
assertThat(graphPath.isRepeated()).isTrue();
assertThat(graphPath.debugString()).isEqualTo("repeated <graph_path>");
}
  @Test
  public void testLambdaArgumentType() {
    // Lambda-typed arguments: ARG_TYPE_LAMBDA, no resolved Type, and a
    // FUNCTION<args->result> debug string. lambdaArgs is grown between
    // constructions, so each FunctionArgumentType below is built from the
    // list's contents at that point.
    SimpleType boolType = TypeFactory.createSimpleType(TypeKind.TYPE_BOOL);
    FunctionArgumentType boolArgType = new FunctionArgumentType(boolType);
    SimpleType int64Type = TypeFactory.createSimpleType(TypeKind.TYPE_INT64);
    FunctionArgumentType int64ArgType = new FunctionArgumentType(int64Type);
    FunctionArgumentType t1ArgType = new FunctionArgumentType(SignatureArgumentKind.ARG_TYPE_ANY_1);
    List<FunctionArgumentType> lambdaArgs = new ArrayList<>();

    // Zero-argument lambda: ()->BOOL.
    FunctionArgumentType lambdaArg = new FunctionArgumentType(lambdaArgs, boolArgType);
    assertThat(lambdaArg.debugString()).isEqualTo(" FUNCTION<()->BOOL>");
    assertThat(lambdaArg.getKind()).isEqualTo(SignatureArgumentKind.ARG_TYPE_LAMBDA);
    assertThat(lambdaArg.getType()).isNull();
    checkSerializeAndDeserialize(lambdaArg);

    // One templated argument: <T1>->BOOL (no parentheses for a single arg).
    lambdaArgs.add(t1ArgType);
    lambdaArg = new FunctionArgumentType(lambdaArgs, boolArgType);
    assertThat(lambdaArg.debugString()).isEqualTo(" FUNCTION<<T1>->BOOL>");
    assertThat(lambdaArg.getKind()).isEqualTo(SignatureArgumentKind.ARG_TYPE_LAMBDA);
    assertThat(lambdaArg.getType()).isNull();
    checkSerializeAndDeserialize(lambdaArg);

    // Two arguments: (<T1>, INT64)->BOOL.
    lambdaArgs.add(int64ArgType);
    lambdaArg = new FunctionArgumentType(lambdaArgs, boolArgType);
    assertThat(lambdaArg.debugString()).isEqualTo(" FUNCTION<(<T1>, INT64)->BOOL>");
    assertThat(lambdaArg.getKind()).isEqualTo(SignatureArgumentKind.ARG_TYPE_LAMBDA);
    assertThat(lambdaArg.getType()).isNull();
    checkSerializeAndDeserialize(lambdaArg);

    // Named lambda argument: the name is appended to the debug string.
    lambdaArg =
        new FunctionArgumentType(
            lambdaArgs,
            boolArgType,
            FunctionArgumentTypeOptions.builder()
                .setArgumentName("mylambda")
                .setNamedArgumentKind(NamedArgumentKind.NAMED_ONLY)
                .build());
    assertThat(lambdaArg.debugString()).isEqualTo(" FUNCTION<(<T1>, INT64)->BOOL> mylambda");
    assertThat(lambdaArg.getKind()).isEqualTo(SignatureArgumentKind.ARG_TYPE_LAMBDA);
    assertThat(lambdaArg.getType()).isNull();
    assertThat(lambdaArg.getOptions()).isNotNull();
    assertThat(lambdaArg.getOptions().getArgumentName()).isEqualTo("mylambda");
    checkSerializeAndDeserialize(lambdaArg);
  }
@Test
public void testSerializationAndDeserializationOfFunctionArgumentType() {
FunctionArgumentType arrayTypeAny1 =
new FunctionArgumentType(
SignatureArgumentKind.ARG_ARRAY_TYPE_ANY_1, ArgumentCardinality.REPEATED, 0);
FunctionArgumentType typeFixed1 =
new FunctionArgumentType(
TypeFactory.createSimpleType(TypeKind.TYPE_INT32), ArgumentCardinality.REPEATED, 0);
TypeFactory factory = TypeFactory.nonUniqueNames();
FunctionArgumentType typeFixed2 =
new FunctionArgumentType(
factory.createProtoType(TypeProto.class), ArgumentCardinality.REPEATED, 0);
checkSerializeAndDeserialize(arrayTypeAny1);
checkSerializeAndDeserialize(typeFixed1);
checkSerializeAndDeserialize(typeFixed2);
}
private static void checkSerializeAndDeserialize(FunctionArgumentType functionArgumentType) {
FileDescriptorSetsBuilder fileDescriptorSetsBuilder = new FileDescriptorSetsBuilder();
checkEquals(
functionArgumentType,
FunctionArgumentType.deserialize(
functionArgumentType.serialize(fileDescriptorSetsBuilder),
fileDescriptorSetsBuilder.getDescriptorPools()));
assertThat(functionArgumentType.serialize(fileDescriptorSetsBuilder))
.isEqualTo(
FunctionArgumentType.deserialize(
functionArgumentType.serialize(fileDescriptorSetsBuilder),
fileDescriptorSetsBuilder.getDescriptorPools())
.serialize(fileDescriptorSetsBuilder));
}
@Test
public void testSerializationAndDeserializationOfFunctionArgumentTypeOptions() {
FunctionArgumentTypeOptions options =
FunctionArgumentTypeOptions.builder()
.setCardinality(ArgumentCardinality.REPEATED)
.setMustBeConstant(true)
.setMustBeNonNull(true)
.setIsNotAggregate(true)
.setMustSupportEquality(true)
.setMustSupportOrdering(true)
.setMustSupportGrouping(true)
.setArrayElementMustSupportEquality(true)
.setArrayElementMustSupportOrdering(true)
.setArrayElementMustSupportGrouping(true)
.setMinValue(Long.MIN_VALUE)
.setMaxValue(Long.MAX_VALUE)
.setExtraRelationInputColumnsAllowed(true)
.setRelationInputSchema(
TVFRelation.createValueTableBased(
TypeFactory.createSimpleType(TypeKind.TYPE_INT64)))
.setArgumentName("name", FunctionEnums.NamedArgumentKind.NAMED_ONLY)
.setArgumentNameParseLocation(ParseLocationRange.create("filename1", 0, 1))
.setArgumentTypeParseLocation(ParseLocationRange.create("fielname2", 2, 3))
.setProcedureArgumentMode(ProcedureArgumentMode.INOUT)
.setDescriptorResolutionTableOffset(1234)
.setArgumentCollationMode(FunctionEnums.ArgumentCollationMode.AFFECTS_PROPAGATION)
.setArgumentAliasKind(FunctionEnums.ArgumentAliasKind.ARGUMENT_ALIASED)
.build();
FileDescriptorSetsBuilder fileDescriptorSetsBuilder = new FileDescriptorSetsBuilder();
assertThat(options)
.isEqualTo(
FunctionArgumentTypeOptions.deserialize(
options.serialize(/* argType= */ null, fileDescriptorSetsBuilder),
fileDescriptorSetsBuilder.getDescriptorPools(),
/* argType= */ null,
TypeFactory.nonUniqueNames()));
assertThat(options.serialize(/* argType= */ null, fileDescriptorSetsBuilder))
.isEqualTo(
FunctionArgumentTypeOptions.deserialize(
options.serialize(/* argType= */ null, fileDescriptorSetsBuilder),
fileDescriptorSetsBuilder.getDescriptorPools(),
/* argType= */ null,
TypeFactory.nonUniqueNames())
.serialize(/* argType= */ null, fileDescriptorSetsBuilder));
FunctionArgumentTypeOptions optionsConstantExpression =
FunctionArgumentTypeOptions.builder().setMustBeConstantExpression(true).build();
assertThat(optionsConstantExpression)
.isEqualTo(
FunctionArgumentTypeOptions.deserialize(
optionsConstantExpression.serialize(/* argType= */ null, fileDescriptorSetsBuilder),
fileDescriptorSetsBuilder.getDescriptorPools(),
/* argType= */ null,
TypeFactory.nonUniqueNames()));
assertThat(optionsConstantExpression.serialize(/* argType= */ null, fileDescriptorSetsBuilder))
.isEqualTo(
FunctionArgumentTypeOptions.deserialize(
optionsConstantExpression.serialize(
/* argType= */ null, fileDescriptorSetsBuilder),
fileDescriptorSetsBuilder.getDescriptorPools(),
/* argType= */ null,
TypeFactory.nonUniqueNames())
.serialize(/* argType= */ null, fileDescriptorSetsBuilder));
FunctionArgumentTypeOptions optionsAnalysisConstant =
FunctionArgumentTypeOptions.builder().setMustBeAnalysisConstant(true).build();
assertThat(optionsAnalysisConstant)
.isEqualTo(
FunctionArgumentTypeOptions.deserialize(
optionsAnalysisConstant.serialize(/* argType= */ null, fileDescriptorSetsBuilder),
fileDescriptorSetsBuilder.getDescriptorPools(),
/* argType= */ null,
TypeFactory.nonUniqueNames()));
assertThat(optionsAnalysisConstant.serialize(/* argType= */ null, fileDescriptorSetsBuilder))
.isEqualTo(
FunctionArgumentTypeOptions.deserialize(
optionsAnalysisConstant.serialize(
/* argType= */ null, fileDescriptorSetsBuilder),
fileDescriptorSetsBuilder.getDescriptorPools(),
/* argType= */ null,
TypeFactory.nonUniqueNames())
.serialize(/* argType= */ null, fileDescriptorSetsBuilder));
}
@Test
public void testSettingSameConstnessLevelTwiceIsAllowed() {
FunctionArgumentTypeOptions options1 =
FunctionArgumentTypeOptions.builder().setMustBeAnalysisConstant(true).build();
FunctionArgumentTypeOptions options2 =
FunctionArgumentTypeOptions.builder()
.setMustBeAnalysisConstant(true)
.setMustBeAnalysisConstant(true)
.build();
assertThat(options2).isEqualTo(options1);
FunctionArgumentTypeOptions options3 =
FunctionArgumentTypeOptions.builder().setMustBeConstant(true).build();
FunctionArgumentTypeOptions options4 =
FunctionArgumentTypeOptions.builder()
.setMustBeConstant(true)
.setMustBeConstant(true)
.build();
assertThat(options4).isEqualTo(options3);
FunctionArgumentTypeOptions options5 =
FunctionArgumentTypeOptions.builder().setMustBeConstantExpression(true).build();
FunctionArgumentTypeOptions options6 =
FunctionArgumentTypeOptions.builder()
.setMustBeConstantExpression(true)
.setMustBeConstantExpression(true)
.build();
assertThat(options6).isEqualTo(options5);
}
@Test
public void testSettingMultipleConstnessLevelsThrowsException() {
// Test setting mustBeConstant then mustBeConstantExpression
IllegalStateException e1 =
assertThrows(
IllegalStateException.class,
() ->
FunctionArgumentTypeOptions.builder()
.setMustBeConstant(true)
.setMustBeConstantExpression(true));
assertThat(e1)
.hasMessageThat()
.contains(
"Cannot set mustBeConstantExpression when another constness level is already set.");
// Test setting mustBeConstantExpression then mustBeConstant
IllegalStateException e2 =
assertThrows(
IllegalStateException.class,
() ->
FunctionArgumentTypeOptions.builder()
.setMustBeConstantExpression(true)
.setMustBeConstant(true));
assertThat(e2)
.hasMessageThat()
.contains("Cannot set mustBeConstant when another constness level is already set.");
// Test setting mustBeConstant then mustBeAnalysisConstant
IllegalStateException e3 =
assertThrows(
IllegalStateException.class,
() ->
FunctionArgumentTypeOptions.builder()
.setMustBeConstant(true)
.setMustBeAnalysisConstant(true));
assertThat(e3)
.hasMessageThat()
.contains("Cannot set mustBeAnalysisConstant when another constness level is already set.");
}
  @Test
  @SuppressWarnings("deprecation") // Sets deprecated field for serialization compat.
  public void testSerializationAndDeserializationOfFunctionArgumentTypeOptionsWithLegacyFields() {
    FileDescriptorSetsBuilder fileDescriptorSetsBuilder = new FileDescriptorSetsBuilder();
    // No concrete argument type is needed to exercise the name-related option fields.
    Type argType = null;
    // Base proto: a named REQUIRED argument with the deprecated "argument_name_is_mandatory"
    // field left unset. The variants below are derived from this shape.
    FunctionArgumentTypeOptionsProto optionsProtoNameMandatoryUnset =
        FunctionArgumentTypeOptionsProto.newBuilder()
            .setCardinality(ArgumentCardinality.REQUIRED)
            .setArgumentName("argname")
            .build();
    // Deserialize a few proto shapes that use the deprecated "argument_name_is_mandatory" field in
    // different ways. This test is making sure we handle the deprecated field in expected ways.
    FunctionArgumentTypeOptions fromOptionsProtoNameMandatoryUnset =
        FunctionArgumentTypeOptions.deserialize(
            optionsProtoNameMandatoryUnset, fileDescriptorSetsBuilder.getDescriptorPools(),
            argType, TypeFactory.nonUniqueNames());
    // Deprecated flag explicitly false: expected to behave the same as leaving it unset.
    FunctionArgumentTypeOptions fromOptionsProtoNameMandatoryFalse =
        FunctionArgumentTypeOptions.deserialize(
            optionsProtoNameMandatoryUnset.toBuilder().setArgumentNameIsMandatory(false).build(),
            fileDescriptorSetsBuilder.getDescriptorPools(),
            argType,
            TypeFactory.nonUniqueNames());
    // Deprecated flag true: expected to make the argument name NAMED_ONLY.
    FunctionArgumentTypeOptions fromOptionsProtoNameMandatoryTrue =
        FunctionArgumentTypeOptions.deserialize(
            optionsProtoNameMandatoryUnset.toBuilder().setArgumentNameIsMandatory(true).build(),
            fileDescriptorSetsBuilder.getDescriptorPools(),
            argType,
            TypeFactory.nonUniqueNames());
    // Deprecated flag true but no argument name to apply it to: expected to deserialize as an
    // unnamed argument (the flag is ignored).
    FunctionArgumentTypeOptions fromOptionsProtoMandatoryWithoutName =
        FunctionArgumentTypeOptions.deserialize(
            optionsProtoNameMandatoryUnset.toBuilder()
                .clearArgumentName()
                .setArgumentNameIsMandatory(true)
                .build(),
            fileDescriptorSetsBuilder.getDescriptorPools(),
            argType,
            TypeFactory.nonUniqueNames());
    // Set up several options objects that have the shapes we expect to see in deserializations.
    FunctionArgumentTypeOptions optionsPositionalOrNamed =
        FunctionArgumentTypeOptions.builder()
            .setCardinality(ArgumentCardinality.REQUIRED)
            .setArgumentName("argname", FunctionEnums.NamedArgumentKind.POSITIONAL_OR_NAMED)
            .build();
    FunctionArgumentTypeOptions optionsNamed =
        FunctionArgumentTypeOptions.builder()
            .setCardinality(ArgumentCardinality.REQUIRED)
            .setArgumentName("argname", FunctionEnums.NamedArgumentKind.NAMED_ONLY)
            .build();
    FunctionArgumentTypeOptions optionsUnnamed =
        FunctionArgumentTypeOptions.builder().setCardinality(ArgumentCardinality.REQUIRED).build();
    // Unset and false both map to POSITIONAL_OR_NAMED; true maps to NAMED_ONLY; true without a
    // name maps to unnamed.
    assertThat(fromOptionsProtoNameMandatoryUnset).isEqualTo(optionsPositionalOrNamed);
    assertThat(fromOptionsProtoNameMandatoryFalse).isEqualTo(optionsPositionalOrNamed);
    assertThat(fromOptionsProtoNameMandatoryTrue).isEqualTo(optionsNamed);
    assertThat(fromOptionsProtoMandatoryWithoutName).isEqualTo(optionsUnnamed);
  }
static void checkEquals(FunctionArgumentType type1, FunctionArgumentType type2) {
assertThat(type2.getNumOccurrences()).isEqualTo(type1.getNumOccurrences());
assertThat(type2.getCardinality()).isEqualTo(type1.getCardinality());
assertThat(type2.getKind()).isEqualTo(type1.getKind());
if (type1.isConcrete()) {
assertThat(type1.getType()).isEqualTo(type2.getType());
}
if (type1.getOptions().getDefault() != null) {
assertThat(type1.getOptions().getDefault()).isEqualTo(type2.getOptions().getDefault());
} else {
assertThat(type2.getOptions().getDefault()).isNull();
}
}
@Test
public void testClassAndProtoSize() {
assertWithMessage(
"The number of fields of FunctionArgumentTypeProto has changed, "
+ "please also update the serialization code accordingly.")
.that(FunctionArgumentTypeProto.getDescriptor().getFields())
.hasSize(5);
assertWithMessage(
"The number of fields in FunctionArgumentType class has changed, "
+ "please also update the proto and serialization code accordingly.")
.that(TestUtil.getNonStaticFieldCount(FunctionArgumentType.class))
.isEqualTo(5);
}
@Test
public void testDefaultValues() {
assertThat(
assertThrows(
IllegalArgumentException.class,
() ->
FunctionArgumentTypeOptions.builder()
.setCardinality(ArgumentCardinality.REQUIRED)
.setDefault(Value.createStringValue("abc"))
.build()))
.hasMessageThat()
.contains("Default value cannot be applied to a REQUIRED argument");
assertThat(
assertThrows(
IllegalArgumentException.class,
() ->
FunctionArgumentTypeOptions.builder()
.setCardinality(ArgumentCardinality.REPEATED)
.setDefault(Value.createDoubleValue(3.14))
.build()))
.hasMessageThat()
.contains("Default value cannot be applied to a REPEATED argument");
FunctionArgumentTypeOptions validOptionalArgTypeOption =
FunctionArgumentTypeOptions.builder()
.setCardinality(ArgumentCardinality.OPTIONAL)
.setDefault(Value.createInt32Value(10086))
.build();
FunctionArgumentTypeOptions validOptionalArgTypeOptionNull =
FunctionArgumentTypeOptions.builder()
.setCardinality(ArgumentCardinality.OPTIONAL)
.setDefault(Value.createSimpleNullValue(TypeKind.TYPE_INT32))
.build();
assertThat(
assertThrows(
IllegalArgumentException.class,
() ->
new FunctionArgumentType(
TypeFactory.createSimpleType(TypeKind.TYPE_BYTES),
validOptionalArgTypeOption,
/* numOccurrences= */ 1)))
.hasMessageThat()
.contains("Default value type does not match the argument type");
assertThat(
assertThrows(
IllegalArgumentException.class,
() ->
new FunctionArgumentType(
TypeFactory.createSimpleType(TypeKind.TYPE_INT64),
validOptionalArgTypeOption,
/* numOccurrences= */ 1)))
.hasMessageThat()
.contains("Default value type does not match the argument type");
assertThat(
assertThrows(
IllegalArgumentException.class,
() ->
new FunctionArgumentType(
TypeFactory.createSimpleType(TypeKind.TYPE_INT64),
validOptionalArgTypeOptionNull,
/* numOccurrences= */ 1)))
.hasMessageThat()
.contains("Default value type does not match the argument type");
FunctionArgumentType optionalFixedTypeInt32 =
new FunctionArgumentType(
TypeFactory.createSimpleType(TypeKind.TYPE_INT32),
validOptionalArgTypeOption,
/* numOccurrences= */ 1);
checkSerializeAndDeserialize(optionalFixedTypeInt32);
FunctionArgumentType optionalFixedTypeInt32Null =
new FunctionArgumentType(
TypeFactory.createSimpleType(TypeKind.TYPE_INT32),
validOptionalArgTypeOptionNull,
/* numOccurrences= */ 1);
checkSerializeAndDeserialize(optionalFixedTypeInt32Null);
FunctionArgumentType templatedTypeNonNull =
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_ANY_1,
validOptionalArgTypeOption,
/* numOccurrences= */ 1);
checkSerializeAndDeserialize(templatedTypeNonNull);
FunctionArgumentType templatedTypeNull =
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_ANY_1,
validOptionalArgTypeOptionNull,
/* numOccurrences= */ 1);
checkSerializeAndDeserialize(templatedTypeNull);
assertThat(
assertThrows(
IllegalArgumentException.class,
() ->
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_RELATION,
validOptionalArgTypeOptionNull,
/* numOccurrences= */ 1)))
.hasMessageThat()
.contains("ANY TABLE argument cannot have a default value");
assertThat(
assertThrows(
IllegalArgumentException.class,
() ->
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_VOID,
validOptionalArgTypeOptionNull,
/* numOccurrences= */ 1)))
.hasMessageThat()
.contains("<void> argument cannot have a default value");
assertThat(
assertThrows(
IllegalArgumentException.class,
() ->
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_MODEL,
validOptionalArgTypeOptionNull,
/* numOccurrences= */ 1)))
.hasMessageThat()
.contains("ANY MODEL argument cannot have a default value");
assertThat(
assertThrows(
IllegalArgumentException.class,
() ->
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_CONNECTION,
validOptionalArgTypeOptionNull,
/* numOccurrences= */ 1)))
.hasMessageThat()
.contains("ANY CONNECTION argument cannot have a default value");
assertThat(
assertThrows(
IllegalArgumentException.class,
() ->
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_DESCRIPTOR,
validOptionalArgTypeOptionNull,
/* numOccurrences= */ 1)))
.hasMessageThat()
.contains("ANY DESCRIPTOR argument cannot have a default value");
assertThat(
assertThrows(
IllegalArgumentException.class,
() ->
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_LAMBDA,
validOptionalArgTypeOptionNull,
/* numOccurrences= */ 1)))
.hasMessageThat()
.contains("<function<T->T>> argument cannot have a default value");
}
@Test
public void testFunctionArgumentOptionsTypeConstraint() {
FunctionArgumentTypeOptions supportsOrderingOption =
FunctionArgumentTypeOptions.builder().setMustSupportOrdering(true).build();
FunctionArgumentTypeOptions supportsEqualityOption =
FunctionArgumentTypeOptions.builder().setMustSupportEquality(true).build();
FunctionArgumentTypeOptions supportsGroupingOption =
FunctionArgumentTypeOptions.builder().setMustSupportGrouping(true).build();
FunctionArgumentTypeOptions supportsElementOrderingOption =
FunctionArgumentTypeOptions.builder().setArrayElementMustSupportOrdering(true).build();
FunctionArgumentTypeOptions supportsElementEqualityOption =
FunctionArgumentTypeOptions.builder().setArrayElementMustSupportEquality(true).build();
FunctionArgumentTypeOptions supportsElementGroupingOption =
FunctionArgumentTypeOptions.builder().setArrayElementMustSupportGrouping(true).build();
FunctionArgumentType supportOrderingType =
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_ANY_1, supportsOrderingOption, /* numOccurrences= */ 1);
checkSerializeAndDeserialize(supportOrderingType);
FunctionArgumentType supportEqualityType =
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_ANY_1, supportsEqualityOption, /* numOccurrences= */ 1);
checkSerializeAndDeserialize(supportEqualityType);
FunctionArgumentType supportGroupingType =
new FunctionArgumentType(
SignatureArgumentKind.ARG_TYPE_ANY_1, supportsGroupingOption, /* numOccurrences= */ 1);
checkSerializeAndDeserialize(supportGroupingType);
FunctionArgumentType supportElementOrderingType =
new FunctionArgumentType(
SignatureArgumentKind.ARG_ARRAY_TYPE_ANY_1,
supportsElementOrderingOption,
/* numOccurrences= */ 1);
checkSerializeAndDeserialize(supportElementOrderingType);
FunctionArgumentType supportElementEqualityType =
new FunctionArgumentType(
SignatureArgumentKind.ARG_ARRAY_TYPE_ANY_1,
supportsElementEqualityOption,
/* numOccurrences= */ 1);
checkSerializeAndDeserialize(supportElementEqualityType);
FunctionArgumentType supportElementGroupingType =
new FunctionArgumentType(
SignatureArgumentKind.ARG_ARRAY_TYPE_ANY_1,
supportsElementGroupingOption,
/* numOccurrences= */ 1);
checkSerializeAndDeserialize(supportElementGroupingType);
}
}
|
googleapis/google-cloud-java | 36,122 | java-cloudbuild/proto-google-cloud-build-v1/src/main/java/com/google/cloudbuild/v1/ListBuildTriggersResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/cloudbuild/v1/cloudbuild.proto
// Protobuf Java Version: 3.25.8
package com.google.cloudbuild.v1;
/**
*
*
* <pre>
* Response containing existing `BuildTriggers`.
* </pre>
*
* Protobuf type {@code google.devtools.cloudbuild.v1.ListBuildTriggersResponse}
*/
public final class ListBuildTriggersResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v1.ListBuildTriggersResponse)
ListBuildTriggersResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListBuildTriggersResponse.newBuilder() to construct.
private ListBuildTriggersResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListBuildTriggersResponse() {
triggers_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListBuildTriggersResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloudbuild.v1.Cloudbuild
.internal_static_google_devtools_cloudbuild_v1_ListBuildTriggersResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloudbuild.v1.Cloudbuild
.internal_static_google_devtools_cloudbuild_v1_ListBuildTriggersResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloudbuild.v1.ListBuildTriggersResponse.class,
com.google.cloudbuild.v1.ListBuildTriggersResponse.Builder.class);
}
public static final int TRIGGERS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloudbuild.v1.BuildTrigger> triggers_;
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloudbuild.v1.BuildTrigger> getTriggersList() {
return triggers_;
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloudbuild.v1.BuildTriggerOrBuilder>
getTriggersOrBuilderList() {
return triggers_;
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
@java.lang.Override
public int getTriggersCount() {
return triggers_.size();
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
@java.lang.Override
public com.google.cloudbuild.v1.BuildTrigger getTriggers(int index) {
return triggers_.get(index);
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
@java.lang.Override
public com.google.cloudbuild.v1.BuildTriggerOrBuilder getTriggersOrBuilder(int index) {
return triggers_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to receive the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Token to receive the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < triggers_.size(); i++) {
output.writeMessage(1, triggers_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < triggers_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, triggers_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloudbuild.v1.ListBuildTriggersResponse)) {
return super.equals(obj);
}
com.google.cloudbuild.v1.ListBuildTriggersResponse other =
(com.google.cloudbuild.v1.ListBuildTriggersResponse) obj;
if (!getTriggersList().equals(other.getTriggersList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getTriggersCount() > 0) {
hash = (37 * hash) + TRIGGERS_FIELD_NUMBER;
hash = (53 * hash) + getTriggersList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloudbuild.v1.ListBuildTriggersResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloudbuild.v1.ListBuildTriggersResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloudbuild.v1.ListBuildTriggersResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloudbuild.v1.ListBuildTriggersResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloudbuild.v1.ListBuildTriggersResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloudbuild.v1.ListBuildTriggersResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloudbuild.v1.ListBuildTriggersResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloudbuild.v1.ListBuildTriggersResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloudbuild.v1.ListBuildTriggersResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloudbuild.v1.ListBuildTriggersResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloudbuild.v1.ListBuildTriggersResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloudbuild.v1.ListBuildTriggersResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloudbuild.v1.ListBuildTriggersResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response containing existing `BuildTriggers`.
* </pre>
*
* Protobuf type {@code google.devtools.cloudbuild.v1.ListBuildTriggersResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v1.ListBuildTriggersResponse)
com.google.cloudbuild.v1.ListBuildTriggersResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloudbuild.v1.Cloudbuild
.internal_static_google_devtools_cloudbuild_v1_ListBuildTriggersResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloudbuild.v1.Cloudbuild
.internal_static_google_devtools_cloudbuild_v1_ListBuildTriggersResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloudbuild.v1.ListBuildTriggersResponse.class,
com.google.cloudbuild.v1.ListBuildTriggersResponse.Builder.class);
}
// Construct using com.google.cloudbuild.v1.ListBuildTriggersResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (triggersBuilder_ == null) {
triggers_ = java.util.Collections.emptyList();
} else {
triggers_ = null;
triggersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloudbuild.v1.Cloudbuild
.internal_static_google_devtools_cloudbuild_v1_ListBuildTriggersResponse_descriptor;
}
@java.lang.Override
public com.google.cloudbuild.v1.ListBuildTriggersResponse getDefaultInstanceForType() {
return com.google.cloudbuild.v1.ListBuildTriggersResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloudbuild.v1.ListBuildTriggersResponse build() {
com.google.cloudbuild.v1.ListBuildTriggersResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloudbuild.v1.ListBuildTriggersResponse buildPartial() {
com.google.cloudbuild.v1.ListBuildTriggersResponse result =
new com.google.cloudbuild.v1.ListBuildTriggersResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloudbuild.v1.ListBuildTriggersResponse result) {
if (triggersBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
triggers_ = java.util.Collections.unmodifiableList(triggers_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.triggers_ = triggers_;
} else {
result.triggers_ = triggersBuilder_.build();
}
}
private void buildPartial0(com.google.cloudbuild.v1.ListBuildTriggersResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    @java.lang.Override
    /**
     * Type-dispatching merge: uses the fast typed overload when {@code other}
     * is a {@code ListBuildTriggersResponse}, else falls back to the reflective
     * field-by-field merge in the superclass.
     */
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloudbuild.v1.ListBuildTriggersResponse) {
        return mergeFrom((com.google.cloudbuild.v1.ListBuildTriggersResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    /**
     * Merges another {@code ListBuildTriggersResponse} into this builder:
     * repeated {@code triggers} are concatenated, a non-empty
     * {@code next_page_token} in {@code other} overwrites this builder's value,
     * and unknown fields are merged. Merging the default instance is a no-op.
     */
    public Builder mergeFrom(com.google.cloudbuild.v1.ListBuildTriggersResponse other) {
      if (other == com.google.cloudbuild.v1.ListBuildTriggersResponse.getDefaultInstance())
        return this;
      if (triggersBuilder_ == null) {
        if (!other.triggers_.isEmpty()) {
          if (triggers_.isEmpty()) {
            // Share other's (immutable) list directly; bit cleared so a later
            // write triggers a defensive copy in ensureTriggersIsMutable().
            triggers_ = other.triggers_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureTriggersIsMutable();
            triggers_.addAll(other.triggers_);
          }
          onChanged();
        }
      } else {
        if (!other.triggers_.isEmpty()) {
          if (triggersBuilder_.isEmpty()) {
            // Empty field builder: drop it and adopt other's list wholesale,
            // re-creating the builder only when alwaysUseFieldBuilders is set.
            triggersBuilder_.dispose();
            triggersBuilder_ = null;
            triggers_ = other.triggers_;
            bitField0_ = (bitField0_ & ~0x00000001);
            triggersBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getTriggersFieldBuilder()
                    : null;
          } else {
            triggersBuilder_.addAllMessages(other.triggers_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    /**
     * Parses wire-format bytes from {@code input} directly into this builder.
     * Tag 10 = repeated {@code triggers} (field 1, length-delimited message);
     * tag 18 = {@code next_page_token} (field 2, UTF-8 string). Unrecognized
     * tags are preserved as unknown fields. {@code onChanged()} runs in the
     * finally block so partial parses still notify listeners.
     */
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 10:
              {
                com.google.cloudbuild.v1.BuildTrigger m =
                    input.readMessage(
                        com.google.cloudbuild.v1.BuildTrigger.parser(), extensionRegistry);
                if (triggersBuilder_ == null) {
                  ensureTriggersIsMutable();
                  triggers_.add(m);
                } else {
                  triggersBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Re-wrap as the IOException it may carry for stream-based callers.
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.util.List<com.google.cloudbuild.v1.BuildTrigger> triggers_ =
java.util.Collections.emptyList();
    /**
     * Copy-on-write guard for {@code triggers_}: if presence bit 0x1 is unset,
     * the current list may be shared or immutable, so replace it with a fresh
     * mutable ArrayList copy before any in-place modification.
     */
    private void ensureTriggersIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        triggers_ = new java.util.ArrayList<com.google.cloudbuild.v1.BuildTrigger>(triggers_);
        bitField0_ |= 0x00000001;
      }
    }
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloudbuild.v1.BuildTrigger,
com.google.cloudbuild.v1.BuildTrigger.Builder,
com.google.cloudbuild.v1.BuildTriggerOrBuilder>
triggersBuilder_;
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public java.util.List<com.google.cloudbuild.v1.BuildTrigger> getTriggersList() {
if (triggersBuilder_ == null) {
return java.util.Collections.unmodifiableList(triggers_);
} else {
return triggersBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public int getTriggersCount() {
if (triggersBuilder_ == null) {
return triggers_.size();
} else {
return triggersBuilder_.getCount();
}
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public com.google.cloudbuild.v1.BuildTrigger getTriggers(int index) {
if (triggersBuilder_ == null) {
return triggers_.get(index);
} else {
return triggersBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public Builder setTriggers(int index, com.google.cloudbuild.v1.BuildTrigger value) {
if (triggersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTriggersIsMutable();
triggers_.set(index, value);
onChanged();
} else {
triggersBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public Builder setTriggers(
int index, com.google.cloudbuild.v1.BuildTrigger.Builder builderForValue) {
if (triggersBuilder_ == null) {
ensureTriggersIsMutable();
triggers_.set(index, builderForValue.build());
onChanged();
} else {
triggersBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public Builder addTriggers(com.google.cloudbuild.v1.BuildTrigger value) {
if (triggersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTriggersIsMutable();
triggers_.add(value);
onChanged();
} else {
triggersBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public Builder addTriggers(int index, com.google.cloudbuild.v1.BuildTrigger value) {
if (triggersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTriggersIsMutable();
triggers_.add(index, value);
onChanged();
} else {
triggersBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public Builder addTriggers(com.google.cloudbuild.v1.BuildTrigger.Builder builderForValue) {
if (triggersBuilder_ == null) {
ensureTriggersIsMutable();
triggers_.add(builderForValue.build());
onChanged();
} else {
triggersBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public Builder addTriggers(
int index, com.google.cloudbuild.v1.BuildTrigger.Builder builderForValue) {
if (triggersBuilder_ == null) {
ensureTriggersIsMutable();
triggers_.add(index, builderForValue.build());
onChanged();
} else {
triggersBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public Builder addAllTriggers(
java.lang.Iterable<? extends com.google.cloudbuild.v1.BuildTrigger> values) {
if (triggersBuilder_ == null) {
ensureTriggersIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, triggers_);
onChanged();
} else {
triggersBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public Builder clearTriggers() {
if (triggersBuilder_ == null) {
triggers_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
triggersBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public Builder removeTriggers(int index) {
if (triggersBuilder_ == null) {
ensureTriggersIsMutable();
triggers_.remove(index);
onChanged();
} else {
triggersBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public com.google.cloudbuild.v1.BuildTrigger.Builder getTriggersBuilder(int index) {
return getTriggersFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public com.google.cloudbuild.v1.BuildTriggerOrBuilder getTriggersOrBuilder(int index) {
if (triggersBuilder_ == null) {
return triggers_.get(index);
} else {
return triggersBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public java.util.List<? extends com.google.cloudbuild.v1.BuildTriggerOrBuilder>
getTriggersOrBuilderList() {
if (triggersBuilder_ != null) {
return triggersBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(triggers_);
}
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public com.google.cloudbuild.v1.BuildTrigger.Builder addTriggersBuilder() {
return getTriggersFieldBuilder()
.addBuilder(com.google.cloudbuild.v1.BuildTrigger.getDefaultInstance());
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public com.google.cloudbuild.v1.BuildTrigger.Builder addTriggersBuilder(int index) {
return getTriggersFieldBuilder()
.addBuilder(index, com.google.cloudbuild.v1.BuildTrigger.getDefaultInstance());
}
/**
*
*
* <pre>
* `BuildTriggers` for the project, sorted by `create_time` descending.
* </pre>
*
* <code>repeated .google.devtools.cloudbuild.v1.BuildTrigger triggers = 1;</code>
*/
public java.util.List<com.google.cloudbuild.v1.BuildTrigger.Builder> getTriggersBuilderList() {
return getTriggersFieldBuilder().getBuilderList();
}
    /**
     * Lazily creates the {@code RepeatedFieldBuilderV3} for {@code triggers}.
     * Once created it takes ownership of the current list (seeded with the
     * mutability flag from bit 0x1), and {@code triggers_} is nulled so all
     * further access goes through the builder.
     */
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloudbuild.v1.BuildTrigger,
            com.google.cloudbuild.v1.BuildTrigger.Builder,
            com.google.cloudbuild.v1.BuildTriggerOrBuilder>
        getTriggersFieldBuilder() {
      if (triggersBuilder_ == null) {
        triggersBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloudbuild.v1.BuildTrigger,
                com.google.cloudbuild.v1.BuildTrigger.Builder,
                com.google.cloudbuild.v1.BuildTriggerOrBuilder>(
                triggers_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        triggers_ = null;
      }
      return triggersBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to receive the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Field is still the raw ByteString from parsing: decode it once and
        // cache the String back into the field for subsequent calls.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Token to receive the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token to receive the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to receive the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to receive the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v1.ListBuildTriggersResponse)
}
// @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.ListBuildTriggersResponse)
private static final com.google.cloudbuild.v1.ListBuildTriggersResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloudbuild.v1.ListBuildTriggersResponse();
}
public static com.google.cloudbuild.v1.ListBuildTriggersResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  /**
   * Singleton wire-format parser. Parsing delegates to
   * {@code Builder.mergeFrom(CodedInputStream, ...)}; every failure path
   * attaches the partially-built message via {@code setUnfinishedMessage} so
   * callers can inspect what was read before the error.
   */
  private static final com.google.protobuf.Parser<ListBuildTriggersResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListBuildTriggersResponse>() {
        @java.lang.Override
        public ListBuildTriggersResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Plain I/O failures are normalized to InvalidProtocolBufferException.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<ListBuildTriggersResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListBuildTriggersResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloudbuild.v1.ListBuildTriggersResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,103 | java-dataflow/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/StageExecutionDetails.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/dataflow/v1beta3/metrics.proto
// Protobuf Java Version: 3.25.8
package com.google.dataflow.v1beta3;
/**
*
*
* <pre>
* Information about the workers and work items within a stage.
* </pre>
*
* Protobuf type {@code google.dataflow.v1beta3.StageExecutionDetails}
*/
public final class StageExecutionDetails extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.dataflow.v1beta3.StageExecutionDetails)
StageExecutionDetailsOrBuilder {
private static final long serialVersionUID = 0L;
// Use StageExecutionDetails.newBuilder() to construct.
private StageExecutionDetails(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private StageExecutionDetails() {
workers_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new StageExecutionDetails();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.dataflow.v1beta3.MetricsProto
.internal_static_google_dataflow_v1beta3_StageExecutionDetails_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.dataflow.v1beta3.MetricsProto
.internal_static_google_dataflow_v1beta3_StageExecutionDetails_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.dataflow.v1beta3.StageExecutionDetails.class,
com.google.dataflow.v1beta3.StageExecutionDetails.Builder.class);
}
public static final int WORKERS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.dataflow.v1beta3.WorkerDetails> workers_;
/**
*
*
* <pre>
* Workers that have done work on the stage.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.dataflow.v1beta3.WorkerDetails> getWorkersList() {
return workers_;
}
/**
*
*
* <pre>
* Workers that have done work on the stage.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.dataflow.v1beta3.WorkerDetailsOrBuilder>
getWorkersOrBuilderList() {
return workers_;
}
/**
*
*
* <pre>
* Workers that have done work on the stage.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
*/
@java.lang.Override
public int getWorkersCount() {
return workers_.size();
}
/**
*
*
* <pre>
* Workers that have done work on the stage.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
*/
@java.lang.Override
public com.google.dataflow.v1beta3.WorkerDetails getWorkers(int index) {
return workers_.get(index);
}
/**
*
*
* <pre>
* Workers that have done work on the stage.
* </pre>
*
* <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
*/
@java.lang.Override
public com.google.dataflow.v1beta3.WorkerDetailsOrBuilder getWorkersOrBuilder(int index) {
return workers_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* If present, this response does not contain all requested tasks. To obtain
* the next page of results, repeat the request with page_token set to this
* value.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* If present, this response does not contain all requested tasks. To obtain
* the next page of results, repeat the request with page_token set to this
* value.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  /**
   * Always true (proto3 message with no required fields). The result is
   * memoized in {@code memoizedIsInitialized}: -1 = unknown, 1 = true, 0 = false.
   */
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  /**
   * Serializes this message: each {@code workers} element as field 1,
   * {@code next_page_token} as field 2 only when non-empty (proto3 default
   * omission), then any retained unknown fields.
   */
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < workers_.size(); i++) {
      output.writeMessage(1, workers_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  /**
   * Computes (and memoizes in {@code memoizedSize}) the exact byte size that
   * {@link #writeTo} will emit; mirrors its field-by-field logic.
   */
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < workers_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, workers_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  /**
   * Field-wise equality: workers list, next_page_token, and unknown fields
   * must all match. Non-StageExecutionDetails operands defer to the
   * (reflective) superclass comparison.
   */
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.dataflow.v1beta3.StageExecutionDetails)) {
      return super.equals(obj);
    }
    com.google.dataflow.v1beta3.StageExecutionDetails other =
        (com.google.dataflow.v1beta3.StageExecutionDetails) obj;
    if (!getWorkersList().equals(other.getWorkersList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  /**
   * Hash over descriptor, each set field (workers only when non-empty, matching
   * the serialized form), and unknown fields; memoized in {@code memoizedHashCode}.
   * Consistent with {@link #equals}.
   */
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getWorkersCount() > 0) {
      hash = (37 * hash) + WORKERS_FIELD_NUMBER;
      hash = (53 * hash) + getWorkersList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.dataflow.v1beta3.StageExecutionDetails parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.StageExecutionDetails parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.StageExecutionDetails parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.StageExecutionDetails parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.StageExecutionDetails parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.StageExecutionDetails parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.StageExecutionDetails parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.StageExecutionDetails parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.dataflow.v1beta3.StageExecutionDetails parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.StageExecutionDetails parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.dataflow.v1beta3.StageExecutionDetails parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.StageExecutionDetails parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.dataflow.v1beta3.StageExecutionDetails prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Information about the workers and work items within a stage.
* </pre>
*
* Protobuf type {@code google.dataflow.v1beta3.StageExecutionDetails}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.dataflow.v1beta3.StageExecutionDetails)
com.google.dataflow.v1beta3.StageExecutionDetailsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.dataflow.v1beta3.MetricsProto
.internal_static_google_dataflow_v1beta3_StageExecutionDetails_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.dataflow.v1beta3.MetricsProto
.internal_static_google_dataflow_v1beta3_StageExecutionDetails_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.dataflow.v1beta3.StageExecutionDetails.class,
com.google.dataflow.v1beta3.StageExecutionDetails.Builder.class);
}
// Construct using com.google.dataflow.v1beta3.StageExecutionDetails.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    @java.lang.Override
    /**
     * Resets the builder to default values: empties the workers list (or the
     * nested field builder if one exists), clears all presence bits, and
     * restores next_page_token to the empty string.
     */
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (workersBuilder_ == null) {
        workers_ = java.util.Collections.emptyList();
      } else {
        // List ownership lives in the field builder; null the local reference.
        workers_ = null;
        workersBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.dataflow.v1beta3.MetricsProto
.internal_static_google_dataflow_v1beta3_StageExecutionDetails_descriptor;
}
@java.lang.Override
public com.google.dataflow.v1beta3.StageExecutionDetails getDefaultInstanceForType() {
return com.google.dataflow.v1beta3.StageExecutionDetails.getDefaultInstance();
}
@java.lang.Override
public com.google.dataflow.v1beta3.StageExecutionDetails build() {
com.google.dataflow.v1beta3.StageExecutionDetails result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.dataflow.v1beta3.StageExecutionDetails buildPartial() {
com.google.dataflow.v1beta3.StageExecutionDetails result =
new com.google.dataflow.v1beta3.StageExecutionDetails(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.dataflow.v1beta3.StageExecutionDetails result) {
if (workersBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
workers_ = java.util.Collections.unmodifiableList(workers_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.workers_ = workers_;
} else {
result.workers_ = workersBuilder_.build();
}
}
private void buildPartial0(com.google.dataflow.v1beta3.StageExecutionDetails result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.dataflow.v1beta3.StageExecutionDetails) {
return mergeFrom((com.google.dataflow.v1beta3.StageExecutionDetails) other);
} else {
super.mergeFrom(other);
return this;
}
}
    /**
     * Merges another {@code StageExecutionDetails} into this builder: repeated
     * {@code workers} are concatenated, a non-empty {@code next_page_token}
     * overwrites this builder's value, and unknown fields are merged.
     * Merging the default instance is a no-op.
     */
    public Builder mergeFrom(com.google.dataflow.v1beta3.StageExecutionDetails other) {
      if (other == com.google.dataflow.v1beta3.StageExecutionDetails.getDefaultInstance())
        return this;
      if (workersBuilder_ == null) {
        if (!other.workers_.isEmpty()) {
          if (workers_.isEmpty()) {
            // Adopt other's (immutable) list; cleared bit forces a defensive
            // copy in ensureWorkersIsMutable() before any later write.
            workers_ = other.workers_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureWorkersIsMutable();
            workers_.addAll(other.workers_);
          }
          onChanged();
        }
      } else {
        if (!other.workers_.isEmpty()) {
          if (workersBuilder_.isEmpty()) {
            // Empty field builder: discard it and share other's list directly,
            // re-creating the builder only when alwaysUseFieldBuilders is set.
            workersBuilder_.dispose();
            workersBuilder_ = null;
            workers_ = other.workers_;
            bitField0_ = (bitField0_ & ~0x00000001);
            workersBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getWorkersFieldBuilder()
                    : null;
          } else {
            workersBuilder_.addAllMessages(other.workers_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses serialized fields from {@code input} and merges them into this
// builder. Unknown fields are preserved; partial state is kept (via the
// finally block) even when an exception interrupts parsing.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 marks the end of the input.
          done = true;
          break;
        case 10:
          {
            // Field 1 (workers), wire type 2: a length-delimited WorkerDetails.
            com.google.dataflow.v1beta3.WorkerDetails m =
                input.readMessage(
                    com.google.dataflow.v1beta3.WorkerDetails.parser(), extensionRegistry);
            if (workersBuilder_ == null) {
              ensureWorkersIsMutable();
              workers_.add(m);
            } else {
              workersBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18:
          {
            // Field 2 (next_page_token), wire type 2: a UTF-8 string.
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
// Tracks which fields of this builder have been explicitly set:
// bit 0x1 = workers_ is a private mutable copy, bit 0x2 = nextPageToken_.
private int bitField0_;

// Backing list for the repeated "workers" field; shared/immutable until
// ensureWorkersIsMutable() copies it.
private java.util.List<com.google.dataflow.v1beta3.WorkerDetails> workers_ =
    java.util.Collections.emptyList();

// Copy-on-write guard: replaces a shared list with a private ArrayList copy
// before the first mutation, and records that via bit 0x1.
private void ensureWorkersIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    workers_ = new java.util.ArrayList<com.google.dataflow.v1beta3.WorkerDetails>(workers_);
    bitField0_ |= 0x00000001;
  }
}

// Lazily-created nested-builder support for "workers"; once non-null it,
// rather than workers_, owns the field's state.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.dataflow.v1beta3.WorkerDetails,
        com.google.dataflow.v1beta3.WorkerDetails.Builder,
        com.google.dataflow.v1beta3.WorkerDetailsOrBuilder>
    workersBuilder_;
/**
 * Returns an unmodifiable view of the repeated "workers" field.
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public java.util.List<com.google.dataflow.v1beta3.WorkerDetails> getWorkersList() {
  return workersBuilder_ == null
      ? java.util.Collections.unmodifiableList(workers_)
      : workersBuilder_.getMessageList();
}

/**
 * Returns the number of elements in the repeated "workers" field.
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public int getWorkersCount() {
  return workersBuilder_ == null ? workers_.size() : workersBuilder_.getCount();
}

/**
 * Returns the element of the repeated "workers" field at {@code index}.
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public com.google.dataflow.v1beta3.WorkerDetails getWorkers(int index) {
  return workersBuilder_ == null ? workers_.get(index) : workersBuilder_.getMessage(index);
}
/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public Builder setWorkers(int index, com.google.dataflow.v1beta3.WorkerDetails value) {
  // Either mutate the local list (copy-on-write) or delegate to the field builder.
  if (workersBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureWorkersIsMutable();
    workers_.set(index, value);
    onChanged();
  } else {
    workersBuilder_.setMessage(index, value);
  }
  return this;
}

/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public Builder setWorkers(
    int index, com.google.dataflow.v1beta3.WorkerDetails.Builder builderForValue) {
  // Overload taking a sub-builder; it is built eagerly here.
  if (workersBuilder_ == null) {
    ensureWorkersIsMutable();
    workers_.set(index, builderForValue.build());
    onChanged();
  } else {
    workersBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public Builder addWorkers(com.google.dataflow.v1beta3.WorkerDetails value) {
  // Append a single message; null is rejected explicitly.
  if (workersBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureWorkersIsMutable();
    workers_.add(value);
    onChanged();
  } else {
    workersBuilder_.addMessage(value);
  }
  return this;
}

/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public Builder addWorkers(int index, com.google.dataflow.v1beta3.WorkerDetails value) {
  // Insert a single message at the given position.
  if (workersBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureWorkersIsMutable();
    workers_.add(index, value);
    onChanged();
  } else {
    workersBuilder_.addMessage(index, value);
  }
  return this;
}

/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public Builder addWorkers(com.google.dataflow.v1beta3.WorkerDetails.Builder builderForValue) {
  // Append, building the supplied sub-builder eagerly.
  if (workersBuilder_ == null) {
    ensureWorkersIsMutable();
    workers_.add(builderForValue.build());
    onChanged();
  } else {
    workersBuilder_.addMessage(builderForValue.build());
  }
  return this;
}

/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public Builder addWorkers(
    int index, com.google.dataflow.v1beta3.WorkerDetails.Builder builderForValue) {
  // Insert at a position, building the supplied sub-builder eagerly.
  if (workersBuilder_ == null) {
    ensureWorkersIsMutable();
    workers_.add(index, builderForValue.build());
    onChanged();
  } else {
    workersBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public Builder addAllWorkers(
    java.lang.Iterable<? extends com.google.dataflow.v1beta3.WorkerDetails> values) {
  // Bulk append; AbstractMessageLite.Builder.addAll also null-checks elements.
  if (workersBuilder_ == null) {
    ensureWorkersIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(values, workers_);
    onChanged();
  } else {
    workersBuilder_.addAllMessages(values);
  }
  return this;
}

/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public Builder clearWorkers() {
  // Reset to the shared empty list and drop the "privately mutable" bit.
  if (workersBuilder_ == null) {
    workers_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  } else {
    workersBuilder_.clear();
  }
  return this;
}

/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public Builder removeWorkers(int index) {
  // Remove the element at the given position.
  if (workersBuilder_ == null) {
    ensureWorkersIsMutable();
    workers_.remove(index);
    onChanged();
  } else {
    workersBuilder_.remove(index);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public com.google.dataflow.v1beta3.WorkerDetails.Builder getWorkersBuilder(int index) {
  // Forces creation of the field builder so element builders can be handed out.
  return getWorkersFieldBuilder().getBuilder(index);
}

/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public com.google.dataflow.v1beta3.WorkerDetailsOrBuilder getWorkersOrBuilder(int index) {
  // Read-only view of one element; does not force field-builder creation.
  if (workersBuilder_ == null) {
    return workers_.get(index);
  } else {
    return workersBuilder_.getMessageOrBuilder(index);
  }
}

/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public java.util.List<? extends com.google.dataflow.v1beta3.WorkerDetailsOrBuilder>
    getWorkersOrBuilderList() {
  // Read-only view of the whole field; does not force field-builder creation.
  if (workersBuilder_ != null) {
    return workersBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(workers_);
  }
}

/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public com.google.dataflow.v1beta3.WorkerDetails.Builder addWorkersBuilder() {
  // Appends a default-initialized element and returns its builder.
  return getWorkersFieldBuilder()
      .addBuilder(com.google.dataflow.v1beta3.WorkerDetails.getDefaultInstance());
}

/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public com.google.dataflow.v1beta3.WorkerDetails.Builder addWorkersBuilder(int index) {
  // Inserts a default-initialized element at {@code index} and returns its builder.
  return getWorkersFieldBuilder()
      .addBuilder(index, com.google.dataflow.v1beta3.WorkerDetails.getDefaultInstance());
}

/**
 *
 *
 * <pre>
 * Workers that have done work on the stage.
 * </pre>
 *
 * <code>repeated .google.dataflow.v1beta3.WorkerDetails workers = 1;</code>
 */
public java.util.List<com.google.dataflow.v1beta3.WorkerDetails.Builder>
    getWorkersBuilderList() {
  return getWorkersFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for "workers". After creation the
// builder owns the field's state, so the raw list reference is nulled out.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.dataflow.v1beta3.WorkerDetails,
        com.google.dataflow.v1beta3.WorkerDetails.Builder,
        com.google.dataflow.v1beta3.WorkerDetailsOrBuilder>
    getWorkersFieldBuilder() {
  if (workersBuilder_ == null) {
    workersBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.dataflow.v1beta3.WorkerDetails,
            com.google.dataflow.v1beta3.WorkerDetails.Builder,
            com.google.dataflow.v1beta3.WorkerDetailsOrBuilder>(
            // The second argument tells the builder whether the list is already
            // a private mutable copy (bit 0x1).
            workers_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
    workers_ = null;
  }
  return workersBuilder_;
}
// Holds either a java.lang.String or a ByteString; the getters below convert
// lazily in whichever direction is requested and cache the result.
private java.lang.Object nextPageToken_ = "";

/**
 *
 *
 * <pre>
 * If present, this response does not contain all requested tasks. To obtain
 * the next page of results, repeat the request with page_token set to this
 * value.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (!(ref instanceof java.lang.String)) {
    // Stored as ByteString: decode once and cache the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 *
 *
 * <pre>
 * If present, this response does not contain all requested tasks. To obtain
 * the next page of results, repeat the request with page_token set to this
 * value.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof String) {
    // Stored as String: encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * If present, this response does not contain all requested tasks. To obtain
 * the next page of results, repeat the request with page_token set to this
 * value.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  nextPageToken_ = value;
  // Bit 0x2 marks next_page_token as explicitly set.
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * If present, this response does not contain all requested tasks. To obtain
 * the next page of results, repeat the request with page_token set to this
 * value.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
  // Restore the default value ("" on the default instance) and clear the bit.
  nextPageToken_ = getDefaultInstance().getNextPageToken();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * If present, this response does not contain all requested tasks. To obtain
 * the next page of results, repeat the request with page_token set to this
 * value.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // proto3 strings must be valid UTF-8; reject invalid bytes up front.
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
// Unknown-field handling is delegated unchanged to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.dataflow.v1beta3.StageExecutionDetails)
}
// @@protoc_insertion_point(class_scope:google.dataflow.v1beta3.StageExecutionDetails)
// Singleton default instance: the canonical "all fields default" message.
private static final com.google.dataflow.v1beta3.StageExecutionDetails DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.dataflow.v1beta3.StageExecutionDetails();
}

public static com.google.dataflow.v1beta3.StageExecutionDetails getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. Parsing goes through a fresh Builder; on failure the
// partially-built message is attached to the thrown exception.
private static final com.google.protobuf.Parser<StageExecutionDetails> PARSER =
    new com.google.protobuf.AbstractParser<StageExecutionDetails>() {
      @java.lang.Override
      public StageExecutionDetails parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so callers see a protobuf-typed exception.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<StageExecutionDetails> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<StageExecutionDetails> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.dataflow.v1beta3.StageExecutionDetails getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ==== Next file (dataset concatenation boundary): googleapis/google-cloud-java —
// java-monitoring/proto-google-cloud-monitoring-v3/src/main/java/com/google/monitoring/v3/TimeInterval.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/monitoring/v3/common.proto
// Protobuf Java Version: 3.25.8
package com.google.monitoring.v3;
/**
*
*
* <pre>
* Describes a time interval:
*
* * Reads: A half-open time interval. It includes the end time but
* excludes the start time: `(startTime, endTime]`. The start time
* must be specified, must be earlier than the end time, and should be
* no older than the data retention period for the metric.
* * Writes: A closed time interval. It extends from the start time to the end
* time,
* and includes both: `[startTime, endTime]`. Valid time intervals
* depend on the
* [`MetricKind`](https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.metricDescriptors#MetricKind)
* of the metric value. The end time must not be earlier than the start
* time, and the end time must not be more than 25 hours in the past or more
* than five minutes in the future.
* * For `GAUGE` metrics, the `startTime` value is technically optional; if
* no value is specified, the start time defaults to the value of the
* end time, and the interval represents a single point in time. If both
* start and end times are specified, they must be identical. Such an
* interval is valid only for `GAUGE` metrics, which are point-in-time
* measurements. The end time of a new interval must be at least a
* millisecond after the end time of the previous interval.
* * For `DELTA` metrics, the start time and end time must specify a
* non-zero interval, with subsequent points specifying contiguous and
* non-overlapping intervals. For `DELTA` metrics, the start time of
* the next interval must be at least a millisecond after the end time
* of the previous interval.
* * For `CUMULATIVE` metrics, the start time and end time must specify a
* non-zero interval, with subsequent points specifying the same
* start time and increasing end times, until an event resets the
* cumulative value to zero and sets a new start time for the following
* points. The new start time must be at least a millisecond after the
* end time of the previous interval.
* * The start time of a new interval must be at least a millisecond after
* the
* end time of the previous interval because intervals are closed. If the
* start time of a new interval is the same as the end time of the
* previous interval, then data written at the new start time could
* overwrite data written at the previous end time.
* </pre>
*
* Protobuf type {@code google.monitoring.v3.TimeInterval}
*/
public final class TimeInterval extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.monitoring.v3.TimeInterval)
TimeIntervalOrBuilder {
private static final long serialVersionUID = 0L;

// Use TimeInterval.newBuilder() to construct.
private TimeInterval(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor used only for the default instance; fields keep defaults.
private TimeInterval() {}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  // Reflection hook used by the protobuf runtime to create blank instances.
  return new TimeInterval();
}
// Descriptor plumbing generated from google/monitoring/v3/common.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.monitoring.v3.CommonProto
      .internal_static_google_monitoring_v3_TimeInterval_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.monitoring.v3.CommonProto
      .internal_static_google_monitoring_v3_TimeInterval_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.monitoring.v3.TimeInterval.class,
          com.google.monitoring.v3.TimeInterval.Builder.class);
}
// Presence bits: 0x1 = end_time is set, 0x2 = start_time is set.
private int bitField0_;

public static final int END_TIME_FIELD_NUMBER = 2;
private com.google.protobuf.Timestamp endTime_;

/**
 *
 *
 * <pre>
 * Required. The end of the time interval.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp end_time = 2;</code>
 *
 * @return Whether the endTime field is set.
 */
@java.lang.Override
public boolean hasEndTime() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 *
 *
 * <pre>
 * Required. The end of the time interval.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp end_time = 2;</code>
 *
 * @return The endTime.
 */
@java.lang.Override
public com.google.protobuf.Timestamp getEndTime() {
  // Never returns null: falls back to the Timestamp default instance.
  return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;
}

/**
 *
 *
 * <pre>
 * Required. The end of the time interval.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp end_time = 2;</code>
 */
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() {
  return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;
}
public static final int START_TIME_FIELD_NUMBER = 1;
private com.google.protobuf.Timestamp startTime_;

/**
 *
 *
 * <pre>
 * Optional. The beginning of the time interval. The default value
 * for the start time is the end time. The start time must not be
 * later than the end time.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp start_time = 1;</code>
 *
 * @return Whether the startTime field is set.
 */
@java.lang.Override
public boolean hasStartTime() {
  return ((bitField0_ & 0x00000002) != 0);
}

/**
 *
 *
 * <pre>
 * Optional. The beginning of the time interval. The default value
 * for the start time is the end time. The start time must not be
 * later than the end time.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp start_time = 1;</code>
 *
 * @return The startTime.
 */
@java.lang.Override
public com.google.protobuf.Timestamp getStartTime() {
  // Never returns null: falls back to the Timestamp default instance.
  return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
}

/**
 *
 *
 * <pre>
 * Optional. The beginning of the time interval. The default value
 * for the start time is the end time. The start time must not be
 * later than the end time.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp start_time = 1;</code>
 */
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() {
  return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
}
// Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  final byte cached = memoizedIsInitialized;
  if (cached != -1) {
    // Already computed: 1 means initialized, 0 means not.
    return cached == 1;
  }
  // No required fields to check; record and report success.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set fields in field-number order: start_time (1), end_time (2),
// then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(1, getStartTime());
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(2, getEndTime());
  }
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size; must
// mirror writeTo() field for field.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getStartTime());
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getEndTime());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Structural equality: two TimeIntervals are equal when they agree on field
 * presence, on the values of every present field, and on unknown fields.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.monitoring.v3.TimeInterval)) {
    return super.equals(obj);
  }
  com.google.monitoring.v3.TimeInterval that = (com.google.monitoring.v3.TimeInterval) obj;
  if (hasEndTime() != that.hasEndTime()) {
    return false;
  }
  if (hasEndTime() && !getEndTime().equals(that.getEndTime())) {
    return false;
  }
  if (hasStartTime() != that.hasStartTime()) {
    return false;
  }
  if (hasStartTime() && !getStartTime().equals(that.getStartTime())) {
    return false;
  }
  return getUnknownFields().equals(that.getUnknownFields());
}
// Hash consistent with equals(): mixes the descriptor, each present field
// (tagged by its field number), and the unknown fields. Memoized.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasEndTime()) {
    hash = (37 * hash) + END_TIME_FIELD_NUMBER;
    hash = (53 * hash) + getEndTime().hashCode();
  }
  if (hasStartTime()) {
    hash = (37 * hash) + START_TIME_FIELD_NUMBER;
    hash = (53 * hash) + getStartTime().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points. The in-memory overloads
// (ByteBuffer/ByteString/byte[]) go straight through PARSER; the stream
// overloads route through GeneratedMessageV3 helpers that translate
// protobuf exceptions into IOException.
public static com.google.monitoring.v3.TimeInterval parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.monitoring.v3.TimeInterval parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.monitoring.v3.TimeInterval parseFrom(com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.monitoring.v3.TimeInterval parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.monitoring.v3.TimeInterval parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.monitoring.v3.TimeInterval parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.monitoring.v3.TimeInterval parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.monitoring.v3.TimeInterval parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message body.
public static com.google.monitoring.v3.TimeInterval parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.monitoring.v3.TimeInterval parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.monitoring.v3.TimeInterval parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.monitoring.v3.TimeInterval parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// Fresh builder with all fields at their defaults.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Builder pre-populated from {@code prototype}.
public static Builder newBuilder(com.google.monitoring.v3.TimeInterval prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields an empty builder; any other instance is merged in.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Describes a time interval:
*
* * Reads: A half-open time interval. It includes the end time but
* excludes the start time: `(startTime, endTime]`. The start time
* must be specified, must be earlier than the end time, and should be
* no older than the data retention period for the metric.
* * Writes: A closed time interval. It extends from the start time to the end
* time,
* and includes both: `[startTime, endTime]`. Valid time intervals
* depend on the
* [`MetricKind`](https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.metricDescriptors#MetricKind)
* of the metric value. The end time must not be earlier than the start
* time, and the end time must not be more than 25 hours in the past or more
* than five minutes in the future.
* * For `GAUGE` metrics, the `startTime` value is technically optional; if
* no value is specified, the start time defaults to the value of the
* end time, and the interval represents a single point in time. If both
* start and end times are specified, they must be identical. Such an
* interval is valid only for `GAUGE` metrics, which are point-in-time
* measurements. The end time of a new interval must be at least a
* millisecond after the end time of the previous interval.
* * For `DELTA` metrics, the start time and end time must specify a
* non-zero interval, with subsequent points specifying contiguous and
* non-overlapping intervals. For `DELTA` metrics, the start time of
* the next interval must be at least a millisecond after the end time
* of the previous interval.
* * For `CUMULATIVE` metrics, the start time and end time must specify a
* non-zero interval, with subsequent points specifying the same
* start time and increasing end times, until an event resets the
* cumulative value to zero and sets a new start time for the following
* points. The new start time must be at least a millisecond after the
* end time of the previous interval.
* * The start time of a new interval must be at least a millisecond after
* the
* end time of the previous interval because intervals are closed. If the
* start time of a new interval is the same as the end time of the
* previous interval, then data written at the new start time could
* overwrite data written at the previous end time.
* </pre>
*
* Protobuf type {@code google.monitoring.v3.TimeInterval}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.monitoring.v3.TimeInterval)
com.google.monitoring.v3.TimeIntervalOrBuilder {
// Builder-side descriptor plumbing; mirrors the message-level accessors.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.monitoring.v3.CommonProto
      .internal_static_google_monitoring_v3_TimeInterval_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.monitoring.v3.CommonProto
      .internal_static_google_monitoring_v3_TimeInterval_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.monitoring.v3.TimeInterval.class,
          com.google.monitoring.v3.TimeInterval.Builder.class);
}
// Construct using com.google.monitoring.v3.TimeInterval.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}

// Eagerly creates the sub-message field builders when the runtime demands it
// (alwaysUseFieldBuilders); otherwise they are created lazily on first use.
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
    getEndTimeFieldBuilder();
    getStartTimeFieldBuilder();
  }
}
// Resets the builder to the default state: both timestamps cleared, their
// field builders disposed, and all presence bits dropped.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  endTime_ = null;
  if (endTimeBuilder_ != null) {
    endTimeBuilder_.dispose();
    endTimeBuilder_ = null;
  }
  startTime_ = null;
  if (startTimeBuilder_ != null) {
    startTimeBuilder_.dispose();
    startTimeBuilder_ = null;
  }
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.monitoring.v3.CommonProto
      .internal_static_google_monitoring_v3_TimeInterval_descriptor;
}

@java.lang.Override
public com.google.monitoring.v3.TimeInterval getDefaultInstanceForType() {
  return com.google.monitoring.v3.TimeInterval.getDefaultInstance();
}
// build() enforces isInitialized(); buildPartial() does not and may return a
// message missing required fields (none exist for this type).
@java.lang.Override
public com.google.monitoring.v3.TimeInterval build() {
  com.google.monitoring.v3.TimeInterval result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.monitoring.v3.TimeInterval buildPartial() {
  com.google.monitoring.v3.TimeInterval result =
      new com.google.monitoring.v3.TimeInterval(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
// Copies each set field from the builder into {@code result}, preferring the
// field builder's built value when one exists, and transfers presence bits.
private void buildPartial0(com.google.monitoring.v3.TimeInterval result) {
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.endTime_ = endTimeBuilder_ == null ? endTime_ : endTimeBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.startTime_ = startTimeBuilder_ == null ? startTime_ : startTimeBuilder_.build();
    to_bitField0_ |= 0x00000002;
  }
  result.bitField0_ |= to_bitField0_;
}
// clone() and the reflective (descriptor-driven) mutators delegate unchanged
// to GeneratedMessageV3.Builder.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.monitoring.v3.TimeInterval) {
return mergeFrom((com.google.monitoring.v3.TimeInterval) other);
} else {
super.mergeFrom(other);
return this;
}
}
/** Field-wise merge: fields set on {@code other} are merged into this builder. */
public Builder mergeFrom(com.google.monitoring.v3.TimeInterval other) {
if (other == com.google.monitoring.v3.TimeInterval.getDefaultInstance()) return this;
if (other.hasEndTime()) {
mergeEndTime(other.getEndTime());
}
if (other.hasStartTime()) {
mergeStartTime(other.getStartTime());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// Proto3 message with no required fields: every state is "initialized".
@java.lang.Override
public final boolean isInitialized() {
return true;
}
/** Wire-format merge. Tag 10 = start_time (field 1), tag 18 = end_time (field 2). */
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getStartTimeFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 10
case 18:
{
input.readMessage(getEndTimeFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Runs even on parse failure: the builder may have been partially updated.
onChanged();
} // finally
return this;
}
// Presence bits: 0x00000001 = end_time set, 0x00000002 = start_time set.
private int bitField0_;
// ---- end_time (field 2). Either endTime_ holds the value directly, or
// endTimeBuilder_ is active and owns it; never both. ----
private com.google.protobuf.Timestamp endTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
endTimeBuilder_;
/**
 * Required. The end of the time interval.
 * {@code .google.protobuf.Timestamp end_time = 2;}
 * @return Whether the endTime field is set.
 */
public boolean hasEndTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * Required. The end of the time interval.
 * @return The endTime, or the Timestamp default instance if unset.
 */
public com.google.protobuf.Timestamp getEndTime() {
if (endTimeBuilder_ == null) {
return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;
} else {
return endTimeBuilder_.getMessage();
}
}
/** Sets end_time to the given (non-null) value. */
public Builder setEndTime(com.google.protobuf.Timestamp value) {
if (endTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
endTime_ = value;
} else {
endTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/** Sets end_time from a builder (snapshot via build()). */
public Builder setEndTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (endTimeBuilder_ == null) {
endTime_ = builderForValue.build();
} else {
endTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/** Merges into end_time if one is already set and non-default, else replaces it. */
public Builder mergeEndTime(com.google.protobuf.Timestamp value) {
if (endTimeBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& endTime_ != null
&& endTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
getEndTimeBuilder().mergeFrom(value);
} else {
endTime_ = value;
}
} else {
endTimeBuilder_.mergeFrom(value);
}
if (endTime_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/** Clears end_time and its presence bit. */
public Builder clearEndTime() {
bitField0_ = (bitField0_ & ~0x00000001);
endTime_ = null;
if (endTimeBuilder_ != null) {
endTimeBuilder_.dispose();
endTimeBuilder_ = null;
}
onChanged();
return this;
}
/** Returns a mutable sub-builder for end_time; marks the field present. */
public com.google.protobuf.Timestamp.Builder getEndTimeBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getEndTimeFieldBuilder().getBuilder();
}
/** Read-only view of end_time, whichever of builder/message currently holds it. */
public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() {
if (endTimeBuilder_ != null) {
return endTimeBuilder_.getMessageOrBuilder();
} else {
return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;
}
}
// Lazily creates the single-field builder; ownership of the value moves from
// endTime_ into the builder (endTime_ is nulled once the builder exists).
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getEndTimeFieldBuilder() {
if (endTimeBuilder_ == null) {
endTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getEndTime(), getParentForChildren(), isClean());
endTime_ = null;
}
return endTimeBuilder_;
}
// ---- start_time (field 1): presence bit 0x00000002. Mirrors the end_time
// accessors above; either startTime_ or startTimeBuilder_ owns the value. ----
private com.google.protobuf.Timestamp startTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
startTimeBuilder_;
/**
 * Optional. The beginning of the time interval. The default value
 * for the start time is the end time. The start time must not be
 * later than the end time.
 * {@code .google.protobuf.Timestamp start_time = 1;}
 * @return Whether the startTime field is set.
 */
public boolean hasStartTime() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * Optional. The beginning of the time interval (defaults to the end time).
 * @return The startTime, or the Timestamp default instance if unset.
 */
public com.google.protobuf.Timestamp getStartTime() {
if (startTimeBuilder_ == null) {
return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
} else {
return startTimeBuilder_.getMessage();
}
}
/** Sets start_time to the given (non-null) value. */
public Builder setStartTime(com.google.protobuf.Timestamp value) {
if (startTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
startTime_ = value;
} else {
startTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/** Sets start_time from a builder (snapshot via build()). */
public Builder setStartTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (startTimeBuilder_ == null) {
startTime_ = builderForValue.build();
} else {
startTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/** Merges into start_time if one is already set and non-default, else replaces it. */
public Builder mergeStartTime(com.google.protobuf.Timestamp value) {
if (startTimeBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& startTime_ != null
&& startTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
getStartTimeBuilder().mergeFrom(value);
} else {
startTime_ = value;
}
} else {
startTimeBuilder_.mergeFrom(value);
}
if (startTime_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/** Clears start_time and its presence bit. */
public Builder clearStartTime() {
bitField0_ = (bitField0_ & ~0x00000002);
startTime_ = null;
if (startTimeBuilder_ != null) {
startTimeBuilder_.dispose();
startTimeBuilder_ = null;
}
onChanged();
return this;
}
/** Returns a mutable sub-builder for start_time; marks the field present. */
public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getStartTimeFieldBuilder().getBuilder();
}
/** Read-only view of start_time, whichever of builder/message currently holds it. */
public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() {
if (startTimeBuilder_ != null) {
return startTimeBuilder_.getMessageOrBuilder();
} else {
return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
}
}
// Lazily creates the single-field builder; ownership of the value moves from
// startTime_ into the builder (startTime_ is nulled once the builder exists).
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getStartTimeFieldBuilder() {
if (startTimeBuilder_ == null) {
startTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getStartTime(), getParentForChildren(), isClean());
startTime_ = null;
}
return startTimeBuilder_;
}
// Generated final pass-throughs for unknown-field handling.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.monitoring.v3.TimeInterval)
}
// @@protoc_insertion_point(class_scope:google.monitoring.v3.TimeInterval)
// Singleton default (all-fields-unset) instance, created at class-init time.
private static final com.google.monitoring.v3.TimeInterval DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.monitoring.v3.TimeInterval();
}
public static com.google.monitoring.v3.TimeInterval getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stream parser. On failure the partially-built message is attached to the
// thrown InvalidProtocolBufferException via setUnfinishedMessage.
private static final com.google.protobuf.Parser<TimeInterval> PARSER =
new com.google.protobuf.AbstractParser<TimeInterval>() {
@java.lang.Override
public TimeInterval parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TimeInterval> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TimeInterval> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.monitoring.v3.TimeInterval getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
package sun.awt;
import java.awt.GraphicsEnvironment;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.StreamTokenizer;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.StringTokenizer;
import java.util.Vector;
import javax.swing.plaf.FontUIResource;
import sun.awt.motif.MFontConfiguration;
import sun.font.CompositeFont;
import sun.font.FontManager;
import sun.font.SunFontManager;
import sun.font.FontConfigManager;
import sun.font.FcFontConfiguration;
import sun.font.FontAccess;
import sun.font.FontUtilities;
import sun.font.NativeFont;
import sun.util.logging.PlatformLogger;
/**
* The X11 implementation of {@link FontManager}.
*/
public class X11FontManager extends SunFontManager {
// constants identifying XLFD and font ID fields
// (an XLFD has 14 '-'-separated fields; indices below are 1-based)
private static final int FOUNDRY_FIELD = 1;
private static final int FAMILY_NAME_FIELD = 2;
private static final int WEIGHT_NAME_FIELD = 3;
private static final int SLANT_FIELD = 4;
private static final int SETWIDTH_NAME_FIELD = 5;
private static final int ADD_STYLE_NAME_FIELD = 6;
private static final int PIXEL_SIZE_FIELD = 7;
private static final int POINT_SIZE_FIELD = 8;
private static final int RESOLUTION_X_FIELD = 9;
private static final int RESOLUTION_Y_FIELD = 10;
private static final int SPACING_FIELD = 11;
private static final int AVERAGE_WIDTH_FIELD = 12;
private static final int CHARSET_REGISTRY_FIELD = 13;
private static final int CHARSET_ENCODING_FIELD = 14;
/*
 * fontNameMap is a map from a fontID (which is a substring of an XLFD like
 * "-monotype-arial-bold-r-normal-iso8859-7")
 * to font file path like
 * /usr/openwin/lib/locale/iso_8859_7/X11/fonts/TrueType/ArialBoldItalic.ttf
 * It's used in a couple of methods like
 * getFileNameFomPlatformName(..) to help locate the font file.
 * We use this substring of a full XLFD because the font configuration files
 * define the XLFDs in a way that's easier to make into a request.
 * E.g., the -0-0-0-0-p-0- reported by X is -*-%d-*-*-p-*- in the font
 * configuration files. We need to remove that part for comparisons.
 */
// NOTE(review): raw collection types (Map/HashMap/Vector) throughout these
// fields predate generics; candidates for Map<String, String> and
// Map<String, Vector<String>> — verify all call sites before generifying.
private static Map fontNameMap = new HashMap();
/*
 * xlfdMap is a map from a platform path like
 * /usr/openwin/lib/locale/ja/X11/fonts/TT/HG-GothicB.ttf to an XLFD like
 * "-ricoh-hg gothic b-medium-r-normal--0-0-0-0-m-0-jisx0201.1976-0"
 * Because there may be multiple native names, because the font is used
 * to support multiple X encodings for example, the value of an entry in
 * this map is always a vector where we store all the native names.
 * For fonts which we don't understand the key isn't a pathname, its
 * the full XLFD string like :-
 * "-ricoh-hg gothic b-medium-r-normal--0-0-0-0-m-0-jisx0201.1976-0"
 */
private static Map xlfdMap = new HashMap();
/* xFontDirsMap is also a map from a font ID to a font filepath.
 * The difference from fontNameMap is just that it does not have
 * resolved symbolic links. Normally this is not interesting except
 * that we need to know the directory in which a font was found to
 * add it to the X font server path, since although the files may
 * be linked, the fonts.dir is different and specific to the encoding
 * handled by that directory. This map is nulled out after use to free
 * heap space. If the optimal path is taken, such that all fonts in
 * font configuration files are referenced by filename, then the font
 * dir can be directly derived as its parent directory.
 * If a font is used by two XLFDs, each corresponding to a different
 * X11 font directory, then precautions must be taken to include both
 * directories.
 */
private static Map xFontDirsMap;
/*
 * This is the set of font directories needed to be on the X font path
 * to enable AWT heavyweights to find all of the font configuration fonts.
 * It is populated by :
 * - awtfontpath entries in the fontconfig.properties
 * - parent directories of "core" fonts used in the fontconfig.properties
 * - looking up font dirs in the xFontDirsMap where the key is a fontID
 *   (cut down version of the XLFD read from the font configuration file).
 * This set is nulled out after use to free heap space.
 */
private static HashSet<String> fontConfigDirs = null;
/* These maps are used on Linux where we reference the Lucida oblique
 * fonts in fontconfig files even though they aren't in the standard
 * font directory. This explicitly remaps the XLFDs for these to the
 * correct base font. This is needed to prevent composite fonts from
 * defaulting to the Lucida Sans which is a bad substitute for the
 * monospaced Lucida Sans Typewriter. Also these maps prevent the
 * JRE from doing wasted work at start up.
 */
// Lazily populated by initObliqueLucidaFontMap(); null until first use.
HashMap<String, String> oblmap = null;
/*
 * Used to eliminate redundant work. When a font directory is
 * registered it added to this list. Subsequent registrations for the
 * same directory can then be skipped by checking this Map.
 * Access to this map is not synchronised here since creation
 * of the singleton GE instance is already synchronised and that is
 * the only code path that accesses this map.
 */
private static HashMap registeredDirs = new HashMap();
/* Array of directories to be added to the X11 font path.
 * Used by static method called from Toolkits which use X11 fonts.
 * Specifically this means MToolkit
 */
private static String[] fontdirs = null;
private static String[] defaultPlatformFont = null;
// Lazily created manager for fontconfig (libfontconfig) lookups.
private FontConfigManager fcManager = null;
/** Returns the process-wide font manager singleton, as the X11 subtype. */
public static X11FontManager getInstance() {
    SunFontManager fm = SunFontManager.getInstance();
    return (X11FontManager) fm;
}
/**
 * Takes family name property in the following format:
 * "-linotype-helvetica-medium-r-normal-sans-*-%d-*-*-p-*-iso8859-1"
 * and returns the name of the corresponding physical font.
 * This code is used to resolve font configuration fonts, and expects
 * only to get called for these fonts.
 *
 * Side effects: may populate fontNameMap/xlfdMap, set fontPath and
 * register all platform font directories on a cache miss.
 * Returns null if no file can be found (or for xlfds when headless).
 */
@Override
public String getFileNameFromPlatformName(String platName) {
/* If the FontConfig file doesn't use xlfds, or its
* FcFontConfiguration, this may be already a file name.
*/
if (platName.startsWith("/")) {
return platName;
}
String fileName = null;
String fontID = specificFontIDForName(platName);
/* If the font filename has been explicitly assigned in the
* font configuration file, use it. This avoids accessing
* the wrong fonts on Linux, where different fonts (some
* of which may not be usable by 2D) may share the same
* specific font ID. It may also speed up the lookup.
*/
fileName = super.getFileNameFromPlatformName(platName);
if (fileName != null) {
if (isHeadless() && fileName.startsWith("-")) {
/* if it's headless, no xlfd should be used */
return null;
}
if (fileName.startsWith("/")) {
/* If a path is assigned in the font configuration file,
* it is required that the config file also specify using the
* new awtfontpath key the X11 font directories
* which must be added to the X11 font path to support
* AWT access to that font. For that reason we no longer
* have code here to add the parent directory to the list
* of font config dirs, since the parent directory may not
* be sufficient if fonts are symbolically linked to a
* different directory.
*
* Add this XLFD (platform name) to the list of known
* ones for this file.
*/
Vector xVal = (Vector) xlfdMap.get(fileName);
if (xVal == null) {
/* Try to be robust on Linux distros which move fonts
* around by verifying that the fileName represents a
* file that exists. If it doesn't, set it to null
* to trigger a search.
*/
if (getFontConfiguration().needToSearchForFile(fileName)) {
fileName = null;
}
if (fileName != null) {
xVal = new Vector();
xVal.add(platName);
xlfdMap.put(fileName, xVal);
}
} else {
if (!xVal.contains(platName)) {
xVal.add(platName);
}
}
}
if (fileName != null) {
fontNameMap.put(fontID, fileName);
return fileName;
}
}
if (fontID != null) {
fileName = (String)fontNameMap.get(fontID);
/* On Linux check for the Lucida Oblique fonts */
if (fileName == null && FontUtilities.isLinux && !isOpenJDK()) {
if (oblmap == null) {
initObliqueLucidaFontMap();
}
String oblkey = getObliqueLucidaFontID(fontID);
if (oblkey != null) {
fileName = oblmap.get(oblkey);
}
}
// Expensive fallback: register every platform font dir so that
// registerFontDir() fills fontNameMap, then retry the lookup.
if (fontPath == null &&
(fileName == null || !fileName.startsWith("/"))) {
if (FontUtilities.debugFonts()) {
FontUtilities.getLogger()
.warning("** Registering all font paths because " +
"can't find file for " + platName);
}
fontPath = getPlatformFontPath(noType1Font);
registerFontDirs(fontPath);
if (FontUtilities.debugFonts()) {
FontUtilities.getLogger()
.warning("** Finished registering all font paths");
}
fileName = (String)fontNameMap.get(fontID);
}
if (fileName == null && !isHeadless()) {
/* Query X11 directly to see if this font is available
* as a native font.
*/
fileName = getX11FontName(platName);
}
if (fileName == null) {
// Last resort: retry with the italic/oblique-switched font ID.
fontID = switchFontIDForName(platName);
fileName = (String)fontNameMap.get(fontID);
}
if (fileName != null) {
fontNameMap.put(fontID, fileName);
}
}
return fileName;
}
/**
 * Returns all X11 native (XLFD) names known for the given font file.
 * Falls back to the single platform name from the font configuration
 * when no mapping exists, or null if that is also unavailable.
 */
@Override
protected String[] getNativeNames(String fontFileName,
                                  String platformName) {
    Vector xlfds = (Vector) xlfdMap.get(fontFileName);
    if (xlfds != null) {
        return (String[]) xlfds.toArray(new String[xlfds.size()]);
    }
    if (platformName == null) {
        return null;
    }
    /* back-stop so that at least the name used in the
     * font configuration file is known as a native name
     */
    return new String[] { platformName };
}
/* NOTE: this method needs to be executed in a privileged context.
 * The superclass constructor which is the primary caller of
 * this method executes entirely in such a context. Additionally
 * the loadFonts() method does too. So all should be well.
 */
/**
 * Parses {@code <path>/fonts.dir} and records, for each entry, the mapping
 * from specific font ID to canonical font file path (fontNameMap), the
 * reverse file-to-XLFD mapping (xlfdMap) and the originating X directory
 * (xFontDirsMap). Unreadable or malformed files are skipped silently.
 */
@Override
protected void registerFontDir(String path) {
    /* fonts.dir file format looks like :-
     * 47
     * Arial.ttf -monotype-arial-regular-r-normal--0-0-0-0-p-0-iso8859-1
     * Arial-Bold.ttf -monotype-arial-bold-r-normal--0-0-0-0-p-0-iso8859-1
     * ...
     */
    if (FontUtilities.debugFonts()) {
        FontUtilities.getLogger().info("ParseFontDir " + path);
    }
    File fontsDotDir = new File(path + File.separator + "fonts.dir");
    FileReader fr = null;
    try {
        if (fontsDotDir.canRead()) {
            fr = new FileReader(fontsDotDir);
            BufferedReader br = new BufferedReader(fr, 8192);
            StreamTokenizer st = new StreamTokenizer(br);
            st.eolIsSignificant(true);
            int ttype = st.nextToken();
            if (ttype == StreamTokenizer.TT_NUMBER) {
                // First line is the declared number of entries.
                int numEntries = (int)st.nval;
                ttype = st.nextToken();
                if (ttype == StreamTokenizer.TT_EOL) {
                    // Re-tokenize as whole lines split only at control chars.
                    st.resetSyntax();
                    st.wordChars(32, 127);
                    st.wordChars(128 + 32, 255);
                    st.whitespaceChars(0, 31);
                    for (int i=0; i < numEntries; i++) {
                        ttype = st.nextToken();
                        if (ttype == StreamTokenizer.TT_EOF) {
                            break;
                        }
                        if (ttype != StreamTokenizer.TT_WORD) {
                            break;
                        }
                        int breakPos = st.sval.indexOf(' ');
                        if (breakPos <= 0) {
                            /* On TurboLinux 8.0 a fonts.dir file had
                             * a line with integer value "24" which
                             * appeared to be the number of remaining
                             * entries in the file. This didn't add to
                             * the value on the first line of the file.
                             * Seemed like XFree86 didn't like this line
                             * much either. It failed to parse the file.
                             * Ignore lines like this completely, and
                             * don't let them count as an entry.
                             */
                            numEntries++;
                            ttype = st.nextToken();
                            if (ttype != StreamTokenizer.TT_EOL) {
                                break;
                            }
                            continue;
                        }
                        if (st.sval.charAt(0) == '!') {
                            /* TurboLinux 8.0 comment line: ignore.
                             * can't use st.commentChar('!') to just
                             * skip because this line mustn't count
                             * against numEntries.
                             */
                            numEntries++;
                            ttype = st.nextToken();
                            if (ttype != StreamTokenizer.TT_EOL) {
                                break;
                            }
                            continue;
                        }
                        String fileName = st.sval.substring(0, breakPos);
                        /* TurboLinux 8.0 uses some additional syntax to
                         * indicate algorithmic styling values.
                         * Ignore ':' separated files at the beginning
                         * of the fileName
                         */
                        int lastColon = fileName.lastIndexOf(':');
                        if (lastColon > 0) {
                            if (lastColon+1 >= fileName.length()) {
                                continue;
                            }
                            fileName = fileName.substring(lastColon+1);
                        }
                        String fontPart = st.sval.substring(breakPos+1);
                        String fontID = specificFontIDForName(fontPart);
                        String sVal = (String) fontNameMap.get(fontID);
                        if (FontUtilities.debugFonts()) {
                            PlatformLogger logger = FontUtilities.getLogger();
                            logger.info("file=" + fileName +
                                        " xlfd=" + fontPart);
                            logger.info("fontID=" + fontID +
                                        " sVal=" + sVal);
                        }
                        String fullPath = null;
                        try {
                            File file = new File(path,fileName);
                            /* we may have a resolved symbolic link
                             * this becomes important for an xlfd we
                             * still need to know the location it was
                             * found to update the X server font path
                             * for use by AWT heavyweights - and when 2D
                             * wants to use the native rasteriser.
                             */
                            if (xFontDirsMap == null) {
                                xFontDirsMap = new HashMap();
                            }
                            xFontDirsMap.put(fontID, path);
                            fullPath = file.getCanonicalPath();
                        } catch (IOException e) {
                            fullPath = path + File.separator + fileName;
                        }
                        Vector xVal = (Vector) xlfdMap.get(fullPath);
                        if (FontUtilities.debugFonts()) {
                            FontUtilities.getLogger()
                                .info("fullPath=" + fullPath +
                                      " xVal=" + xVal);
                        }
                        /* FIX: was "(A && (sVal == null) || !sVal.startsWith("/"))".
                         * Since && binds tighter than ||, that could NPE on
                         * sVal.startsWith when sVal was null and A was false,
                         * and re-registered (adding a duplicate xVal entry)
                         * whenever sVal was a relative name even if this
                         * fontPart was already mapped. Register only when the
                         * XLFD is not yet known for the file AND there is no
                         * existing absolute-path mapping.
                         */
                        if ((xVal == null || !xVal.contains(fontPart)) &&
                            (sVal == null || !sVal.startsWith("/"))) {
                            if (FontUtilities.debugFonts()) {
                                FontUtilities.getLogger()
                                    .info("Map fontID:"+fontID +
                                          "to file:" + fullPath);
                            }
                            fontNameMap.put(fontID, fullPath);
                            if (xVal == null) {
                                xVal = new Vector();
                                xlfdMap.put (fullPath, xVal);
                            }
                            xVal.add(fontPart);
                        }
                        ttype = st.nextToken();
                        if (ttype != StreamTokenizer.TT_EOL) {
                            break;
                        }
                    }
                }
            }
            // No explicit close here: the finally block below closes fr
            // (the previous in-try fr.close() was redundant).
        }
    } catch (IOException ioe1) {
        // Best effort: an unreadable or malformed fonts.dir simply
        // contributes no entries.
    } finally {
        if (fr != null) {
            try {
                fr.close();
            } catch (IOException ioe2) {
            }
        }
    }
}
/** Loads fonts via the superclass, then resets the transient XLFD bookkeeping maps. */
@Override
public void loadFonts() {
super.loadFonts();
/* These maps are greatly expanded during a loadFonts but
* can be reset to their initial state afterwards.
* Since preferLocaleFonts() and preferProportionalFonts() will
* trigger a partial repopulating from the FontConfiguration
* it has to be the initial (empty) state for the latter two, not
* simply nulling out.
* xFontDirsMap is a special case in that the implementation
* will typically not ever need to initialise it so it can be null.
*/
xFontDirsMap = null;
xlfdMap = new HashMap(1);
fontNameMap = new HashMap(1);
}
/**
 * If the given font ID names one of the four oblique Lucida faces, returns
 * the lookup key for {@code oblmap} (the ID truncated before the "-i-"
 * slant marker); otherwise returns null.
 */
private String getObliqueLucidaFontID(String fontID) {
    final String[] obliqueLucidaPrefixes = {
        "-lucidasans-medium-i-normal",
        "-lucidasans-bold-i-normal",
        "-lucidatypewriter-medium-i-normal",
        "-lucidatypewriter-bold-i-normal",
    };
    for (String prefix : obliqueLucidaPrefixes) {
        if (fontID.startsWith(prefix)) {
            return fontID.substring(0, fontID.indexOf("-i-"));
        }
    }
    return null;
}
/**
 * Asks the X server whether the platform name (with "%d" size placeholders
 * widened to "*") matches an available font; returns the usable XLFD or null.
 */
private static String getX11FontName(String platName) {
    String xlfd = platName.replaceAll("%d", "*");
    return NativeFont.fontExists(xlfd) ? xlfd : null;
}
/**
 * Builds the map from oblique-Lucida font-ID prefixes to the bundled JRE
 * font files that should back them (see the oblmap field comment).
 */
private void initObliqueLucidaFontMap() {
    String jreFontDir = jreLibDirName + "/fonts/";
    oblmap = new HashMap<String, String>();
    oblmap.put("-lucidasans-medium",
               jreFontDir + "LucidaSansRegular.ttf");
    oblmap.put("-lucidasans-bold",
               jreFontDir + "LucidaSansDemiBold.ttf");
    oblmap.put("-lucidatypewriter-medium",
               jreFontDir + "LucidaTypewriterRegular.ttf");
    oblmap.put("-lucidatypewriter-bold",
               jreFontDir + "LucidaTypewriterBold.ttf");
}
/**
 * Returns whether AWT is operating headless.
 */
private boolean isHeadless() {
    // The original code assigned getLocalGraphicsEnvironment() to an unused
    // local. The call is retained (without the dead variable) because it
    // presumably forces GraphicsEnvironment initialization before the
    // headless query — TODO confirm whether it can be dropped entirely.
    GraphicsEnvironment.getLocalGraphicsEnvironment();
    return GraphicsEnvironment.isHeadless();
}
/**
 * Reduces a full XLFD to the "specific font ID" used as the lookup key in
 * {@code fontNameMap}: the family-through-setwidth fields joined with the
 * charset registry/encoding fields, lower-cased. A malformed name (one
 * without 14 hyphen-separated fields) is logged and returned unchanged.
 *
 * @param name a full XLFD such as
 *        "-monotype-arial-bold-r-normal--0-0-0-0-p-0-iso8859-7"
 * @return the reduced, lower-cased font ID (or {@code name} if malformed)
 */
private String specificFontIDForName(String name) {
    // hPos[k] records the index of the k-th field-separating hyphen.
    int[] hPos = new int[14];
    int hyphenCnt = 1;
    int pos = 1;
    while (pos != -1 && hyphenCnt < 14) {
        pos = name.indexOf('-', pos);
        if (pos != -1) {
            hPos[hyphenCnt++] = pos;
            pos++;
        }
    }
    if (hyphenCnt != 14) {
        if (FontUtilities.debugFonts()) {
            FontUtilities.getLogger()
                .severe("Font Configuration Font ID is malformed:" + name);
        }
        return name; // what else can we do?
    }
    // StringBuilder (not StringBuffer): purely method-local, no sync needed.
    StringBuilder sb =
        new StringBuilder(name.substring(hPos[FAMILY_NAME_FIELD-1],
                                         hPos[SETWIDTH_NAME_FIELD]));
    sb.append(name.substring(hPos[CHARSET_REGISTRY_FIELD-1]));
    return sb.toString().toLowerCase(Locale.ENGLISH);
}
/**
 * Derives a fallback font ID from an XLFD by flipping the slant field
 * between italic ("i") and oblique ("o"), used when the exact ID misses in
 * {@code fontNameMap}. Also remaps the Sun-specific "itc zapfdingbats"
 * registry to "adobe" (workaround for bug 4471000). A malformed name
 * (one without 14 hyphen-separated fields) is logged and returned unchanged.
 *
 * @param name a full XLFD
 * @return the slant-switched, lower-cased font ID (or {@code name} if malformed)
 */
private String switchFontIDForName(String name) {
    // hPos[k] records the index of the k-th field-separating hyphen.
    int[] hPos = new int[14];
    int hyphenCnt = 1;
    int pos = 1;
    while (pos != -1 && hyphenCnt < 14) {
        pos = name.indexOf('-', pos);
        if (pos != -1) {
            hPos[hyphenCnt++] = pos;
            pos++;
        }
    }
    if (hyphenCnt != 14) {
        if (FontUtilities.debugFonts()) {
            FontUtilities.getLogger()
                .severe("Font Configuration Font ID is malformed:" + name);
        }
        return name; // what else can we do?
    }
    String slant = name.substring(hPos[SLANT_FIELD-1]+1,
                                  hPos[SLANT_FIELD]);
    String family = name.substring(hPos[FAMILY_NAME_FIELD-1]+1,
                                   hPos[FAMILY_NAME_FIELD]);
    String registry = name.substring(hPos[CHARSET_REGISTRY_FIELD-1]+1,
                                     hPos[CHARSET_REGISTRY_FIELD]);
    String encoding = name.substring(hPos[CHARSET_ENCODING_FIELD-1]+1);
    if (slant.equals("i")) {
        slant = "o";
    } else if (slant.equals("o")) {
        slant = "i";
    }
    // workaround for #4471000
    if (family.equals("itc zapfdingbats")
        && registry.equals("sun")
        && encoding.equals("fontspecific")){
        registry = "adobe";
    }
    // StringBuilder (not StringBuffer): purely method-local, no sync needed.
    StringBuilder sb =
        new StringBuilder(name.substring(hPos[FAMILY_NAME_FIELD-1],
                                         hPos[SLANT_FIELD-1]+1));
    sb.append(slant);
    sb.append(name.substring(hPos[SLANT_FIELD],
                             hPos[SETWIDTH_NAME_FIELD]+1));
    sb.append(registry);
    sb.append(name.substring(hPos[CHARSET_ENCODING_FIELD-1]));
    return sb.toString().toLowerCase(Locale.ENGLISH);
}
/**
 * Resolves an XLFD to a font file name: first by its specific font ID,
 * then by the slant-switched ID, and finally by the default font file.
 */
public String getFileNameFromXLFD(String name) {
    String fontID = specificFontIDForName(name);
    if (fontID == null) {
        return null;
    }
    String fileName = (String) fontNameMap.get(fontID);
    if (fileName == null) {
        // Retry with the italic/oblique-switched form of the ID.
        fileName = (String) fontNameMap.get(switchFontIDForName(name));
    }
    return (fileName != null) ? fileName : getDefaultFontFile();
}
/* Register just the paths, (it doesn't register the fonts).
* If a font configuration file has specified a baseFontPath
* fontPath is just those directories, unless on usage we
* find it doesn't contain what we need for the logical fonts.
* Otherwise, we register all the paths on Solaris, because
* the fontPath we have here is the complete one from
* parsing /var/sadm/install/contents, not just
* what's on the X font path (may be this should be
* changed).
* But for now what it means is that if we didn't do
* this then if the font weren't listed anywhere on the
* less complete font path we'd trigger loadFonts which
* actually registers the fonts. This may actually be
* the right thing tho' since that would also set up
* the X font path without which we wouldn't be able to
* display some "native" fonts.
* So something to revisit is that probably fontPath
* here ought to be only the X font path + jre font dir.
* loadFonts should have a separate native call to
* get the rest of the platform font path.
*
* Registering the directories can now be avoided in the
* font configuration initialisation when filename entries
* exist in the font configuration file for all fonts.
* (Perhaps a little confusingly a filename entry is
* actually keyed using the XLFD used in the font entries,
* and it maps *to* a real filename).
* In the event any are missing, registration of all
* directories will be invoked to find the real files.
*
* But registering the directory performed other
* functions such as filling in the map of all native names
* for the font. So when this method isn't invoked, they still
* must be found. This is mitigated by getNativeNames now
* being able to return at least the platform name, but mostly
* by ensuring that when a filename key is found, that
* xlfd key is stored as one of the set of platform names
* for the font. Its a set because typical font configuration
* files reference the same CJK font files using multiple
* X11 encodings. For the code that adds this to the map
* see X11GE.getFileNameFromPlatformName(..)
* If you don't get all of these then some code points may
* not use the Xserver, and will not get the PCF bitmaps
* that are available for some point sizes.
* So, in the event that there is such a problem,
* unconditionally making this call may be necessary, at
* some cost to JRE start-up
*/
@Override
protected void registerFontDirs(String pathName) {
StringTokenizer parser = new StringTokenizer(pathName,
File.pathSeparator);
try {
while (parser.hasMoreTokens()) {
String dirPath = parser.nextToken();
if (dirPath != null && !registeredDirs.containsKey(dirPath)) {
registeredDirs.put(dirPath, null);
registerFontDir(dirPath);
}
}
} catch (NoSuchElementException e) {
}
}
// An X font spec (xlfd) includes an encoding. The same TrueType font file
// may be referenced from different X font directories in font.dir files
// to support use in multiple encodings by X apps.
// So for the purposes of font configuration logical fonts where AWT
// heavyweights need to access the font via X APIs we need to ensure that
// the directory for precisely the encodings needed by this are added to
// the x font path. This requires that we note the platform names
// specified in font configuration files and use that to identify the
// X font directory that contains a font.dir file for that platform name
// and add it to the X font path (if display is local)
// Here we make use of an already built map of xlfds to font locations
// to add the font location to the set of those required to build the
// x font path needed by AWT.
// These are added to the x font path later.
// All this is necessary because on Solaris the font.dir directories
// may contain not real font files, but symbolic links to the actual
// location but that location is not suitable for the x font path, since
// it probably doesn't have a font.dir at all and certainly not one
// with the required encodings
// If the fontconfiguration file is properly set up so that all fonts
// are mapped to files then we will never trigger initialising
// xFontDirsMap (it will be null). In this case the awtfontpath entries
// must specify all the X11 directories needed by AWT.
@Override
protected void addFontToPlatformFontPath(String platformName) {
// Lazily initialize fontConfigDirs.
getPlatformFontPathFromFontConfig();
if (xFontDirsMap != null) {
String fontID = specificFontIDForName(platformName);
String dirName = (String)xFontDirsMap.get(fontID);
if (dirName != null) {
fontConfigDirs.add(dirName);
}
}
return;
}
private void getPlatformFontPathFromFontConfig() {
if (fontConfigDirs == null) {
fontConfigDirs = getFontConfiguration().getAWTFontPathSet();
if (FontUtilities.debugFonts() && fontConfigDirs != null) {
String[] names = fontConfigDirs.toArray(new String[0]);
for (int i=0;i<names.length;i++) {
FontUtilities.getLogger().info("awtfontpath : " + names[i]);
}
}
}
}
@Override
protected void registerPlatformFontsUsedByFontConfiguration() {
// Lazily initialize fontConfigDirs.
getPlatformFontPathFromFontConfig();
if (fontConfigDirs == null) {
return;
}
if (FontUtilities.isLinux) {
fontConfigDirs.add(jreLibDirName+File.separator+"oblique-fonts");
}
fontdirs = (String[])fontConfigDirs.toArray(new String[0]);
}
// Implements SunGraphicsEnvironment.createFontConfiguration.
protected FontConfiguration createFontConfiguration() {
/* The logic here decides whether to use a preconfigured
* fontconfig.properties file, or synthesise one using platform APIs.
* On Solaris (as opposed to OpenSolaris) we try to use the
* pre-configured ones, but if the files it specifies are missing
* we fail-safe to synthesising one. This might happen if Solaris
* changes its fonts.
* For OpenSolaris I don't expect us to ever create fontconfig files,
* so it will always synthesise. Note that if we misidentify
* OpenSolaris as Solaris, then the test for the presence of
* Solaris-only font files will correct this.
* For Linux we require an exact match of distro and version to
* use the preconfigured file, and also that it points to
* existent fonts.
* If synthesising fails, we fall back to any preconfigured file
* and do the best we can. For the commercial JDK this will be
* fine as it includes the Lucida fonts. OpenJDK should not hit
* this as the synthesis should always work on its platforms.
*/
FontConfiguration mFontConfig = new MFontConfiguration(this);
if (FontUtilities.isOpenSolaris ||
(FontUtilities.isLinux &&
(!mFontConfig.foundOsSpecificFile() ||
!mFontConfig.fontFilesArePresent()) ||
(FontUtilities.isSolaris && !mFontConfig.fontFilesArePresent()))) {
FcFontConfiguration fcFontConfig =
new FcFontConfiguration(this);
if (fcFontConfig.init()) {
return fcFontConfig;
}
}
mFontConfig.init();
return mFontConfig;
}
    // Creates a font configuration carrying the caller's preferences for
    // locale-specific and proportional fonts; delegates to MFontConfiguration.
    public FontConfiguration
    createFontConfiguration(boolean preferLocaleFonts,
                            boolean preferPropFonts) {
        return new MFontConfiguration(this,
                                      preferLocaleFonts, preferPropFonts);
    }
    // Native lookup of the platform font path. synchronized so it serialises
    // with getFontPath below; the GE instance acts as the lock (see comment there).
    public synchronized native String getFontPathNative(boolean noType1Fonts);

    // Returns the platform font path, optionally excluding Type1 fonts.
    protected synchronized String getFontPath(boolean noType1Fonts) {
        isHeadless(); // make sure GE is inited, as its the X11 lock.
        return getFontPathNative(noType1Fonts);
    }
public String[] getDefaultPlatformFont() {
if (defaultPlatformFont != null) {
return defaultPlatformFont;
}
String[] info = new String[2];
getFontConfigManager().initFontConfigFonts(false);
FontConfigManager.FcCompFont[] fontConfigFonts =
getFontConfigManager().getFontConfigFonts();
for (int i=0; i<fontConfigFonts.length; i++) {
if ("sans".equals(fontConfigFonts[i].fcFamily) &&
0 == fontConfigFonts[i].style) {
info[0] = fontConfigFonts[i].firstFont.familyName;
info[1] = fontConfigFonts[i].firstFont.fontFile;
break;
}
}
/* Absolute last ditch attempt in the face of fontconfig problems.
* If we didn't match, pick the first, or just make something
* up so we don't NPE.
*/
if (info[0] == null) {
if (fontConfigFonts.length > 0 &&
fontConfigFonts[0].firstFont.fontFile != null) {
info[0] = fontConfigFonts[0].firstFont.familyName;
info[1] = fontConfigFonts[0].firstFont.fontFile;
} else {
info[0] = "Dialog";
info[1] = "/dialog.ttf";
}
}
defaultPlatformFont = info;
return defaultPlatformFont;
}
public synchronized FontConfigManager getFontConfigManager() {
if (fcManager == null) {
fcManager = new FontConfigManager();
}
return fcManager;
}
@Override
protected FontUIResource getFontConfigFUIR(String family, int style, int size) {
CompositeFont font2D = getFontConfigManager().getFontConfigFont(family, style);
if (font2D == null) { // Not expected, just a precaution.
return new FontUIResource(family, style, size);
}
/* The name of the font will be that of the physical font in slot,
* but by setting the handle to that of the CompositeFont it
* renders as that CompositeFont.
* It also needs to be marked as a created font which is the
* current mechanism to signal that deriveFont etc must copy
* the handle from the original font.
*/
FontUIResource fuir =
new FontUIResource(font2D.getFamilyName(null), style, size);
FontAccess.getFontAccess().setFont2D(fuir, font2D.handle);
FontAccess.getFontAccess().setCreatedFont(fuir);
return fuir;
}
}
|
apache/hive | 36,285 | ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec.vector;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.exec.AbstractMapOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
import org.apache.hadoop.hive.ql.io.orc.OrcStruct;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.plan.VectorPartitionDesc;
import org.apache.hadoop.hive.ql.plan.VectorPartitionDesc.VectorMapOperatorReadType;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.lazy.fast.LazySimpleDeserializeRead;
import org.apache.hadoop.hive.serde2.lazybinary.fast.LazyBinaryDeserializeRead;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.BinaryComparable;
import org.apache.hadoop.io.Writable;
import com.google.common.base.Preconditions;
/*
*
* The vectorized MapOperator.
*
* There are 3 modes of reading for vectorization:
*
* 1) One for the Vectorized Input File Format which returns VectorizedRowBatch as the row.
*
* 2) One for using VectorDeserializeRow to deserialize each row into the VectorizedRowBatch.
* Currently, these Input File Formats:
* TEXTFILE
* SEQUENCEFILE
*
* 3) And one using the regular partition deserializer to get the row object and assigning
* the row object into the VectorizedRowBatch with VectorAssignRow.
* This picks up Input File Format not supported by the other two.
*/
public class VectorMapOperator extends AbstractMapOperator {
  private static final long serialVersionUID = 1L;

  /*
   * Overall information on this vectorized Map operation.
   */
  // Nominal input path -> per-partition reading context (built in internalSetChildren).
  private transient HashMap<Path, VectorPartitionContext> fileToPartitionContextMap;

  // The single TableScanOperator root; vectorization supports exactly one input tree.
  private transient Operator<? extends OperatorDesc> oneRootOperator;

  // Struct type / object inspector describing the full table row schema.
  private transient TypeInfo tableStructTypeInfo;
  private transient StandardStructObjectInspector tableStandardStructObjectInspector;

  private transient TypeInfo[] tableRowTypeInfos;

  // Column numbers of the data columns actually referenced by the query.
  private transient int[] dataColumnNums;

  private transient StandardStructObjectInspector neededStandardStructObjectInspector;

  private transient VectorizedRowBatchCtx batchContext;
  // The context for creating the VectorizedRowBatch for this Map node that
  // the Vectorizer class determined.

  /*
   * A different batch for vectorized Input File Format readers so they can do their work
   * overlapped with work of the row collection that vector/row deserialization does. This allows
   * the partitions to mix modes (e.g. for us to flush the previously batched rows on file change).
   */
  private transient VectorizedRowBatch vectorizedInputFileFormatBatch;

  /*
   * This batch is only used by vector/row deserializer readers.
   */
  private transient VectorizedRowBatch deserializerBatch;

  // Count of batches delivered to the operator tree so far.
  private transient long batchCounter;

  // Column-layout counts taken from the VectorizedRowBatchCtx.
  private transient int dataColumnCount;
  private transient int partitionColumnCount;
  // Partition column values, refreshed once per partition file.
  private transient Object[] partitionValues;
  private transient int virtualColumnCount;
  // Whether a ROW__ID virtual column exists, and its batch column number (-1 if absent).
  private transient boolean hasRowIdentifier;
  private transient int rowIdentifierColumnNum;

  // Truncated boolean include array handed to the vector/row deserializers.
  private transient boolean[] dataColumnsToIncludeTruncated;

  /*
   * The following members have context information for the current partition file being read.
   */
  private transient VectorMapOperatorReadType currentReadType;
  private transient VectorPartitionContext currentVectorPartContext;
  // Current vector map operator read type and context.

  private transient int currentDataColumnCount;
  // The number of data columns that the current reader will return.
  // Only applicable for vector/row deserialization.

  private transient DeserializeRead currentDeserializeRead;
  private transient VectorDeserializeRow currentVectorDeserializeRow;
  // When we are doing vector deserialization, these are the fast deserializer and
  // the vector row deserializer.

  private Deserializer currentPartDeserializer;
  private StructObjectInspector currentPartRawRowObjectInspector;
  private VectorAssignRow currentVectorAssign;
  // When we are doing row deserialization, these are the regular deserializer,
  // partition object inspector, and vector row assigner.
protected abstract class VectorPartitionContext {
protected final PartitionDesc partDesc;
String tableName;
String partName;
/*
* Initialization here is adapted from MapOperator.MapOpCtx.initObjectInspector method.
*/
private VectorPartitionContext(PartitionDesc partDesc) {
this.partDesc = partDesc;
TableDesc td = partDesc.getTableDesc();
// Use table properties in case of unpartitioned tables,
// and the union of table properties and partition properties, with partition
// taking precedence, in the case of partitioned tables
Properties overlayedProps =
SerDeUtils.createOverlayedProperties(td.getProperties(), partDesc.getProperties());
Map<String, String> partSpec = partDesc.getPartSpec();
tableName = String.valueOf(overlayedProps.getProperty("name"));
partName = String.valueOf(partSpec);
}
public PartitionDesc getPartDesc() {
return partDesc;
}
/*
* Override this for concrete initialization.
*/
public abstract void init(Configuration hconf)
throws SerDeException, Exception;
/*
* How many data columns is the partition reader actually supplying?
*/
public abstract int getReaderDataColumnCount();
}
/*
* Context for reading a Vectorized Input File Format.
*/
protected class VectorizedInputFileFormatPartitionContext extends VectorPartitionContext {
private VectorizedInputFileFormatPartitionContext(PartitionDesc partDesc) {
super(partDesc);
}
public void init(Configuration hconf) {
}
@Override
public int getReaderDataColumnCount() {
throw new RuntimeException("Not applicable");
}
}
/*
* Context for using VectorDeserializeRow to deserialize each row from the Input File Format
* into the VectorizedRowBatch.
*/
protected class VectorDeserializePartitionContext extends VectorPartitionContext {
// This helper object deserializes known deserialization / input file format combination into
// columns of a row in a vectorized row batch.
private VectorDeserializeRow vectorDeserializeRow;
private DeserializeRead deserializeRead;
private int readerColumnCount;
private VectorDeserializePartitionContext(PartitionDesc partDesc) {
super(partDesc);
}
public VectorDeserializeRow getVectorDeserializeRow() {
return vectorDeserializeRow;
}
DeserializeRead getDeserializeRead() {
return deserializeRead;
}
@Override
public int getReaderDataColumnCount() {
return readerColumnCount;
}
public void init(Configuration hconf)
throws SerDeException, HiveException {
VectorPartitionDesc vectorPartDesc = partDesc.getVectorPartitionDesc();
// This type information specifies the data types the partition needs to read.
TypeInfo[] dataTypeInfos = vectorPartDesc.getDataTypeInfos();
// We need to provide the minimum number of columns to be read so
// LazySimpleDeserializeRead's separator parser does not waste time.
//
Preconditions.checkState(dataColumnsToIncludeTruncated != null);
TypeInfo[] minimalDataTypeInfos;
if (dataColumnsToIncludeTruncated.length < dataTypeInfos.length) {
minimalDataTypeInfos =
Arrays.copyOf(dataTypeInfos, dataColumnsToIncludeTruncated.length);
} else {
minimalDataTypeInfos = dataTypeInfos;
}
readerColumnCount = minimalDataTypeInfos.length;
switch (vectorPartDesc.getVectorDeserializeType()) {
case LAZY_SIMPLE:
{
LazySerDeParameters simpleSerdeParams =
new LazySerDeParameters(hconf, partDesc.getTableDesc().getProperties(),
LazySimpleSerDe.class.getName());
LazySimpleDeserializeRead lazySimpleDeserializeRead =
new LazySimpleDeserializeRead(
minimalDataTypeInfos,
batchContext.getRowdataTypePhysicalVariations(),
/* useExternalBuffer */ true,
simpleSerdeParams);
vectorDeserializeRow =
new VectorDeserializeRow<LazySimpleDeserializeRead>(lazySimpleDeserializeRead);
// Initialize with data row type conversion parameters.
vectorDeserializeRow.initConversion(tableRowTypeInfos, dataColumnsToIncludeTruncated);
deserializeRead = lazySimpleDeserializeRead;
}
break;
case LAZY_BINARY:
{
LazyBinaryDeserializeRead lazyBinaryDeserializeRead =
new LazyBinaryDeserializeRead(
dataTypeInfos,
/* useExternalBuffer */ true);
vectorDeserializeRow =
new VectorDeserializeRow<LazyBinaryDeserializeRead>(lazyBinaryDeserializeRead);
// Initialize with data row type conversion parameters.
vectorDeserializeRow.initConversion(tableRowTypeInfos, dataColumnsToIncludeTruncated);
deserializeRead = lazyBinaryDeserializeRead;
}
break;
default:
throw new RuntimeException(
"Unexpected vector deserialize row type " + vectorPartDesc.getVectorDeserializeType().name());
}
}
}
/*
* Context for reading using the regular partition deserializer to get the row object and
* assigning the row object into the VectorizedRowBatch with VectorAssignRow
*/
protected class RowDeserializePartitionContext extends VectorPartitionContext {
private Deserializer partDeserializer;
private StructObjectInspector partRawRowObjectInspector;
private VectorAssignRow vectorAssign;
private int readerColumnCount;
private RowDeserializePartitionContext(PartitionDesc partDesc) {
super(partDesc);
}
public Deserializer getPartDeserializer() {
return partDeserializer;
}
public StructObjectInspector getPartRawRowObjectInspector() {
return partRawRowObjectInspector;
}
public VectorAssignRow getVectorAssign() {
return vectorAssign;
}
@Override
public int getReaderDataColumnCount() {
return readerColumnCount;
}
public void init(Configuration hconf)
throws Exception {
VectorPartitionDesc vectorPartDesc = partDesc.getVectorPartitionDesc();
partDeserializer = partDesc.getDeserializer(hconf);
if (partDeserializer instanceof OrcSerde) {
// UNDONE: We need to get the table schema inspector from self-describing Input File
// Formats like ORC. Modify the ORC serde instead? For now, this works.
partRawRowObjectInspector =
(StructObjectInspector) OrcStruct.createObjectInspector(tableStructTypeInfo);
} else {
partRawRowObjectInspector =
(StructObjectInspector) partDeserializer.getObjectInspector();
}
TypeInfo[] dataTypeInfos = vectorPartDesc.getDataTypeInfos();
vectorAssign = new VectorAssignRow();
// Initialize with data type conversion parameters.
readerColumnCount =
vectorAssign.initConversion(dataTypeInfos, tableRowTypeInfos, dataColumnsToIncludeTruncated);
}
}
public VectorPartitionContext createAndInitPartitionContext(PartitionDesc partDesc,
Configuration hconf)
throws SerDeException, Exception {
VectorPartitionDesc vectorPartDesc = partDesc.getVectorPartitionDesc();
if (vectorPartDesc == null) {
return null;
}
VectorPartitionContext vectorPartitionContext;
VectorMapOperatorReadType vectorMapOperatorReadType =
vectorPartDesc.getVectorMapOperatorReadType();
if (vectorMapOperatorReadType == VectorMapOperatorReadType.VECTOR_DESERIALIZE ||
vectorMapOperatorReadType == VectorMapOperatorReadType.ROW_DESERIALIZE) {
// Verify hive.exec.schema.evolution is true or we have an ACID table so we are producing
// the table schema from ORC. The Vectorizer class assures this.
boolean isAcid =
AcidUtils.isTablePropertyTransactional(partDesc.getTableDesc().getProperties());
Preconditions.checkState(Utilities.isSchemaEvolutionEnabled(hconf, isAcid));
}
switch (vectorMapOperatorReadType) {
case VECTORIZED_INPUT_FILE_FORMAT:
vectorPartitionContext = new VectorizedInputFileFormatPartitionContext(partDesc);
break;
case VECTOR_DESERIALIZE:
vectorPartitionContext = new VectorDeserializePartitionContext(partDesc);
break;
case ROW_DESERIALIZE:
vectorPartitionContext = new RowDeserializePartitionContext(partDesc);
break;
default:
throw new RuntimeException("Unexpected vector MapOperator read type " +
vectorMapOperatorReadType.name());
}
vectorPartitionContext.init(hconf);
return vectorPartitionContext;
}
private void determineDataColumnsToIncludeTruncated() {
Preconditions.checkState(batchContext != null);
Preconditions.checkState(dataColumnNums != null);
boolean[] columnsToInclude = new boolean[dataColumnCount];;
final int count = dataColumnNums.length;
int columnNum = -1;
for (int i = 0; i < count; i++) {
columnNum = dataColumnNums[i];
Preconditions.checkState(columnNum < dataColumnCount);
columnsToInclude[columnNum] = true;
}
if (columnNum == -1) {
dataColumnsToIncludeTruncated = new boolean[0];
} else {
dataColumnsToIncludeTruncated = Arrays.copyOf(columnsToInclude, columnNum + 1);
}
}
  /** Kryo ctor. */
  public VectorMapOperator() {
    super();
  }

  /** Standard ctor: passes the compilation context to the operator base class. */
  public VectorMapOperator(CompilationOpContext ctx) {
    super(ctx);
  }
/*
* This is the same as the setChildren method below but for empty tables.
*/
  @Override
  public void initEmptyInputChildren(List<Operator<?>> children, Configuration hconf)
      throws SerDeException, Exception {

    // Get the single TableScanOperator.  Vectorization only supports one input tree.
    Preconditions.checkState(children.size() == 1);
    oneRootOperator = children.get(0);

    // Shared setup with setChildren (batch contexts, partition map, child list).
    internalSetChildren(hconf);
  }
@Override
public void setChildren(Configuration hconf) throws Exception {
// Get the single TableScanOperator. Vectorization only supports one input tree.
Iterator<Operator<? extends OperatorDesc>> aliasToWorkIterator =
conf.getAliasToWork().values().iterator();
oneRootOperator = aliasToWorkIterator.next();
Preconditions.checkState(!aliasToWorkIterator.hasNext());
internalSetChildren(hconf);
}
  /*
   * Create information for vector map operator: batch contexts, table schema objects,
   * the per-path partition context map, and the child operator list.
   * The member oneRootOperator has been set.
   */
  private void internalSetChildren(Configuration hconf) throws Exception {

    // The setupPartitionContextVars uses the prior read type to flush the prior deserializerBatch,
    // so set it here to none.
    currentReadType = VectorMapOperatorReadType.NONE;

    batchContext = conf.getVectorizedRowBatchCtx();

    /*
     * Use a different batch for vectorized Input File Format readers so they can do their work
     * overlapped with work of the row collection that vector/row deserialization does. This allows
     * the partitions to mix modes (e.g. for us to flush the previously batched rows on file change).
     */
    vectorizedInputFileFormatBatch =
        batchContext.createVectorizedRowBatch();
    conf.setVectorizedRowBatch(vectorizedInputFileFormatBatch);

    /*
     * This batch is used by vector/row deserializer readers.
     */
    deserializerBatch = batchContext.createVectorizedRowBatch();

    batchCounter = 0;

    // Cache the batch column layout determined by the Vectorizer.
    dataColumnCount = batchContext.getDataColumnCount();
    partitionColumnCount = batchContext.getPartitionColumnCount();
    partitionValues = new Object[partitionColumnCount];
    virtualColumnCount = batchContext.getVirtualColumnCount();
    rowIdentifierColumnNum = batchContext.findVirtualColumnNum(VirtualColumn.ROWID);
    hasRowIdentifier = (rowIdentifierColumnNum != -1);

    dataColumnNums = batchContext.getDataColumnNums();
    Preconditions.checkState(dataColumnNums != null);

    // Form a truncated boolean include array for our vector/row deserializers.
    determineDataColumnsToIncludeTruncated();

    /*
     * Create table related objects
     */
    final String[] rowColumnNames = batchContext.getRowColumnNames();
    final TypeInfo[] rowColumnTypeInfos = batchContext.getRowColumnTypeInfos();
    tableStructTypeInfo =
        TypeInfoFactory.getStructTypeInfo(
            Arrays.asList(rowColumnNames),
            Arrays.asList(rowColumnTypeInfos));
    tableStandardStructObjectInspector =
        (StandardStructObjectInspector)
            TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(tableStructTypeInfo);

    tableRowTypeInfos = batchContext.getRowColumnTypeInfos();

    /*
     * NOTE: We do not alter the projectedColumns / projectionSize of the batches to just be
     * the included columns (+ partition columns).
     *
     * For now, we need to model the object inspector rows because there are still several
     * vectorized operators that use them.
     *
     * We need to continue to model the Object[] as having null objects for not included columns
     * until the following has been fixed:
     *     o When we have to output a STRUCT for AVG we switch to row GroupBy operators.
     *     o Some variations of VectorMapOperator, VectorReduceSinkOperator, VectorFileSinkOperator
     *       use the row super class to process rows.
     */

    /*
     * The Vectorizer class enforces that there is only one TableScanOperator, so
     * we don't need the more complicated multiple root operator mapping that MapOperator has.
     */
    fileToPartitionContextMap = new HashMap<>();

    // Temporary map so we only create one partition context entry per PartitionDesc.
    HashMap<PartitionDesc, VectorPartitionContext> partitionContextMap =
        new HashMap<PartitionDesc, VectorPartitionContext>();

    for (Map.Entry<Path, List<String>> entry : conf.getPathToAliases().entrySet()) {
      Path path = entry.getKey();
      PartitionDesc partDesc = conf.getPathToPartitionInfo().get(path);

      VectorPartitionContext vectorPartitionContext;
      if (!partitionContextMap.containsKey(partDesc)) {
        vectorPartitionContext = createAndInitPartitionContext(partDesc, hconf);
        partitionContextMap.put(partDesc, vectorPartitionContext);
      } else {
        vectorPartitionContext = partitionContextMap.get(partDesc);
      }

      fileToPartitionContextMap.put(path, vectorPartitionContext);
    }

    // Create list of one.
    List<Operator<? extends OperatorDesc>> children =
        new ArrayList<Operator<? extends OperatorDesc>>();
    children.add(oneRootOperator);

    setChildOperators(children);
  }
  @Override
  public void initializeMapOperator(Configuration hconf) throws HiveException {
    super.initializeMapOperator(hconf);

    // The single root is initialized with the table's standard (writable) struct inspector.
    oneRootOperator.initialize(hconf, new ObjectInspector[] {tableStandardStructObjectInspector});
  }
  // Look up and install the partition-reading context for the current input path.
  public void initializeContexts() throws HiveException {
    Path fpath = getExecContext().getCurrentInputPath();
    Path nominalPath = getNominalPath(fpath);
    setupPartitionContextVars(nominalPath);
  }
// Find context for current input file
  @Override
  public void cleanUpInputFileChangedOp() throws HiveException {
    super.cleanUpInputFileChangedOp();

    // Find context for current input file
    Path fpath = getExecContext().getCurrentInputPath();
    Path nominalPath = getNominalPath(fpath);

    setupPartitionContextVars(nominalPath);

    // Add alias, table name, and partitions to hadoop conf so that their
    // children will inherit these
    // NOTE(review): setupPartitionContextVars can leave currentVectorPartContext null when
    // the nominal path has no mapping — confirm callers guarantee a mapping exists here.
    oneRootOperator.setInputContext(currentVectorPartContext.tableName,
        currentVectorPartContext.partName);
  }
private void setRowIdentiferToNull(VectorizedRowBatch batch) {
ColumnVector rowIdentifierColVector = batch.cols[rowIdentifierColumnNum];
rowIdentifierColVector.isNull[0] = true;
rowIdentifierColVector.noNulls = false;
rowIdentifierColVector.isRepeating = true;
}
/*
* Flush a partially full deserializerBatch.
* @return Return true if the operator tree is not done yet.
*/
private boolean flushDeserializerBatch() throws HiveException {
if (deserializerBatch.size > 0) {
batchCounter++;
oneRootOperator.process(deserializerBatch, 0);
deserializerBatch.reset();
if (oneRootOperator.getDone()) {
setDone(true);
return false;
}
}
return true;
}
  /*
   * Setup the context for reading from the next partition file: selects the read type,
   * flushes rows batched for the previous partition, and primes NULL defaults,
   * partition values, and the ROW__ID column in the deserializer batch.
   */
  private void setupPartitionContextVars(Path nominalPath) throws HiveException {

    currentVectorPartContext = fileToPartitionContextMap.get(nominalPath);
    // NOTE(review): unknown path leaves currentVectorPartContext null and currentReadType
    // unchanged — confirm callers handle that (see cleanUpInputFileChangedOp).
    if (currentVectorPartContext == null) {
      return;
    }

    PartitionDesc partDesc = currentVectorPartContext.getPartDesc();
    VectorPartitionDesc vectorPartDesc = partDesc.getVectorPartitionDesc();
    currentReadType = vectorPartDesc.getVectorMapOperatorReadType();

    /*
     * Setup for 3 different kinds of vectorized reading supported:
     *
     *   1) Read the Vectorized Input File Format which returns VectorizedRowBatch as the row.
     *
     *   2) Read using VectorDeserializeRow to deserialize each row into the VectorizedRowBatch.
     *
     *   3) And read using the regular partition deserializer to get the row object and assigning
     *      the row object into the VectorizedRowBatch with VectorAssignRow.
     */
    if (currentReadType == VectorMapOperatorReadType.VECTORIZED_INPUT_FILE_FORMAT) {

      /*
       * The Vectorized Input File Format reader is responsible for setting the partition column
       * values, resetting and filling in the batch, etc.
       */

      /*
       * Clear all the reading variables.
       */
      currentDataColumnCount = 0;

      currentDeserializeRead = null;
      currentVectorDeserializeRow = null;

      currentPartDeserializer = null;
      currentPartRawRowObjectInspector = null;
      currentVectorAssign = null;

    } else {

      /*
       * We will get "regular" single rows from the Input File Format reader that we will need
       * to {vector|row} deserialize.
       */
      Preconditions.checkState(
          currentReadType == VectorMapOperatorReadType.VECTOR_DESERIALIZE ||
          currentReadType == VectorMapOperatorReadType.ROW_DESERIALIZE);

      /*
       * Clear out any rows in the batch from previous partition since we are going to change
       * the repeating partition column values.
       */
      if (!flushDeserializerBatch()) {

        // Operator tree is now done.
        return;
      }

      /*
       * For this particular file, how many columns will we actually read?
       */
      currentDataColumnCount = currentVectorPartContext.getReaderDataColumnCount();

      if (currentDataColumnCount < dataColumnCount) {

        /*
         * Default any additional data columns to NULL once for the file (if they are present).
         */
        for (int i = currentDataColumnCount; i < dataColumnCount; i++) {
          ColumnVector colVector = deserializerBatch.cols[i];
          if (colVector != null) {
            colVector.isNull[0] = true;
            colVector.noNulls = false;
            colVector.isRepeating = true;
          }
        }
      }

      if (batchContext.getPartitionColumnCount() > 0) {

        /*
         * The partition columns are set once for the partition and are marked repeating.
         */
        VectorizedRowBatchCtx.getPartitionValues(batchContext, partDesc, partitionValues);
        batchContext.addPartitionColsToBatch(deserializerBatch, partitionValues);
      }

      if (hasRowIdentifier) {

        // No ACID in code path -- set ROW__ID to NULL.
        setRowIdentiferToNull(deserializerBatch);
      }

      /*
       * Set or clear the rest of the reading variables based on {vector|row} deserialization.
       */
      switch (currentReadType) {
      case VECTOR_DESERIALIZE:
        {
          VectorDeserializePartitionContext vectorDeserPartContext =
              (VectorDeserializePartitionContext) currentVectorPartContext;

          // Set ours.
          currentDeserializeRead = vectorDeserPartContext.getDeserializeRead();
          currentVectorDeserializeRow = vectorDeserPartContext.getVectorDeserializeRow();

          // Clear the other ones.
          currentPartDeserializer = null;
          currentPartRawRowObjectInspector = null;
          currentVectorAssign = null;
        }
        break;

      case ROW_DESERIALIZE:
        {
          RowDeserializePartitionContext rowDeserPartContext =
              (RowDeserializePartitionContext) currentVectorPartContext;

          // Clear the other ones.
          currentDeserializeRead = null;
          currentVectorDeserializeRow = null;

          // Set ours.
          currentPartDeserializer = rowDeserPartContext.getPartDeserializer();
          currentPartRawRowObjectInspector = rowDeserPartContext.getPartRawRowObjectInspector();
          currentVectorAssign = rowDeserPartContext.getVectorAssign();
        }
        break;

      default:
        throw new RuntimeException("Unexpected VectorMapOperator read type " +
            currentReadType.name());
      }
    }
  }
  @Override
  public Deserializer getCurrentDeserializer() {
    // Not applicable on the vectorized path; callers receive null.
    return null;
  }
  /*
   * Deliver a vector batch to the operator tree.
   *
   * The Vectorized Input File Format reader has already set the partition column
   * values, reset and filled in the batch, etc.
   *
   * We pass the VectorizedRowBatch through here.
   *
   * @return Return true if the operator tree is not done yet.
   */
  private boolean deliverVectorizedRowBatch(Writable value) throws HiveException {

    batchCounter++;
    if (value != null) {
      VectorizedRowBatch batch = (VectorizedRowBatch) value;
      numRows += batch.size;
      if (hasRowIdentifier) {
        final int idx = batchContext.findVirtualColumnNum(VirtualColumn.ROWID);
        // NOTE(review): hasRowIdentifier is (rowIdentifierColumnNum != -1) and
        // rowIdentifierColumnNum was computed with this same lookup, so idx should
        // be >= 0 here, making this branch look unreachable — confirm intent.
        if (idx < 0) {
          setRowIdentiferToNull(batch);
        }
      }
    }
    oneRootOperator.process(value, 0);
    if (oneRootOperator.getDone()) {
      setDone(true);
      return false;
    }
    return true;
  }
/**
 * Reset the deserialization batch between flushes, excluding:
 * - partition columns, which are read only once per file (first batch), and
 * - columns defaulted to repeating NULL because they are absent in this
 *   partition.
 *
 * See {@link #setupPartitionContextVars(Path)}
 */
private void resetVectorizedRowBatchForDeserialize() {
    // Input data columns actually present in this partition.
    for (int i = 0; i < currentDataColumnCount; i++) {
        resetColumnVector(deserializerBatch.cols[i]);
    }
    // Output and scratch columns, skipping the ACID row-identifier slot.
    final int firstNonDataColumn = dataColumnCount + partitionColumnCount;
    for (int i = firstNonDataColumn; i < deserializerBatch.cols.length; i++) {
        if (i != rowIdentifierColumnNum) {
            resetColumnVector(deserializerBatch.cols[i]);
        }
    }
    deserializerBatch.selectedInUse = false;
    deserializerBatch.size = 0;
    deserializerBatch.endOfFile = false;
}
/** Recycles one column vector; null slots (unused columns) are skipped. */
private void resetColumnVector(ColumnVector columnVector) {
    if (columnVector == null) {
        return;
    }
    columnVector.reset();
    columnVector.init();
}
/**
 * Process one input value from the reader.
 *
 * A mapper can span multiple files/partitions, so the per-partition context
 * is refreshed whenever the input file changes. Three kinds of vectorized
 * reading are supported:
 *
 * 1) The Vectorized Input File Format, which returns a VectorizedRowBatch
 *    as the row.
 * 2) VectorDeserializeRow, deserializing each serialized row directly into
 *    the VectorizedRowBatch.
 * 3) The regular partition deserializer producing a row object that is
 *    assigned into the VectorizedRowBatch with VectorAssignRow.
 *
 * @param value a VectorizedRowBatch or one serialized row, depending on mode
 * @throws HiveException wrapping any failure while processing the value
 */
@Override
public void process(Writable value) throws HiveException {
    // A mapper can span multiple files/partitions.
    // The VectorPartitionContext needs to be changed if the input file changed.
    ExecMapperContext context = getExecContext();
    if (context != null && context.inputFileChanged()) {
        // The child operators clean up if the input file has changed.
        cleanUpInputFileChanged();
    }
    if (oneRootOperator.getDone()) {
        return;
    }
    try {
        if (currentReadType == VectorMapOperatorReadType.VECTORIZED_INPUT_FILE_FORMAT) {
            if (!deliverVectorizedRowBatch(value)) {
                // Operator tree is now done.
                return;
            }
        } else if (value instanceof VectorizedRowBatch) {
            /*
             * This case can happen with LLAP. If it is able to deserialize and
             * cache data from the input format, it will deliver that cached
             * data to us as VRBs even though we are in a deserializing mode.
             * First clear out any rows we may have processed in row-mode for
             * the current partition.
             */
            if (!flushDeserializerBatch()) {
                // Operator tree is now done.
                return;
            }
            if (!deliverVectorizedRowBatch(value)) {
                // Operator tree is now done.
                return;
            }
        } else {
            /*
             * We have a "regular" single row from the Input File Format reader
             * that we will need to deserialize.
             */
            Preconditions.checkState(
                currentReadType == VectorMapOperatorReadType.VECTOR_DESERIALIZE ||
                currentReadType == VectorMapOperatorReadType.ROW_DESERIALIZE);
            // Fix: DEFAULT_SIZE is a static constant; reference it via the
            // class instead of the instance.
            if (deserializerBatch.size == VectorizedRowBatch.DEFAULT_SIZE) {
                numRows += deserializerBatch.size;
                // Feed the current full batch to the operator tree.
                batchCounter++;
                oneRootOperator.process(deserializerBatch, 0);
                resetVectorizedRowBatchForDeserialize();
                if (oneRootOperator.getDone()) {
                    setDone(true);
                    return;
                }
            }
            /*
             * Do the {vector|row} deserialization of the one row into the
             * VectorizedRowBatch.
             */
            switch (currentReadType) {
            case VECTOR_DESERIALIZE:
                {
                    BinaryComparable binComp = (BinaryComparable) value;
                    currentDeserializeRead.set(binComp.getBytes(), 0, binComp.getLength());
                    // Deserialize and append a new row using the current batch
                    // size as the index.
                    try {
                        currentVectorDeserializeRow.deserialize(
                            deserializerBatch, deserializerBatch.size++);
                    } catch (Exception e) {
                        throw new HiveException(
                            "\nDeserializeRead detail: " +
                                currentVectorDeserializeRow.getDetailedReadPositionString(),
                            e);
                    }
                }
                break;
            case ROW_DESERIALIZE:
                {
                    Object deserialized = currentPartDeserializer.deserialize(value);
                    // Note: Regardless of what the Input File Format returns, we
                    // have determined with VectorAppendRow.initConversion that
                    // only currentDataColumnCount columns have values we want.
                    //
                    // Any extra columns needed by the table schema were set to
                    // repeating null in the batch by setupPartitionContextVars.
                    // Convert input row to standard objects.
                    List<Object> standardObjects = new ArrayList<Object>();
                    try {
                        ObjectInspectorUtils.copyToStandardObject(
                            standardObjects,
                            deserialized,
                            currentPartRawRowObjectInspector,
                            ObjectInspectorCopyOption.WRITABLE);
                    } catch (Exception e) {
                        // Fix: preserve the original exception as the cause
                        // instead of flattening it into the message string.
                        throw new HiveException("copyToStandardObject failed", e);
                    }
                    if (standardObjects.size() < currentDataColumnCount) {
                        throw new HiveException("Input File Format returned row with too few columns");
                    }
                    // Append the deserialized standard object row using the
                    // current batch size as the index.
                    currentVectorAssign.assignRow(deserializerBatch, deserializerBatch.size++,
                        standardObjects, currentDataColumnCount);
                }
                break;
            default:
                throw new RuntimeException("Unexpected vector MapOperator read type " +
                    currentReadType.name());
            }
        }
    } catch (Exception e) {
        throw new HiveException("Hive Runtime Error while processing row ", e);
    }
}
/**
 * Not supported: rows enter this operator only through
 * {@link #process(Writable)}.
 */
@Override
public void process(Object row, int tag) throws HiveException {
    throw new HiveException("Hive 2 Internal error: should not be called!");
}
/**
 * On a clean close in a deserializing read mode, flush any rows still
 * buffered in the deserialization batch before closing the children.
 */
@Override
public void closeOp(boolean abort) throws HiveException {
    final boolean deserializingMode =
        currentReadType != VectorMapOperatorReadType.VECTORIZED_INPUT_FILE_FORMAT;
    if (!abort && oneRootOperator != null && !oneRootOperator.getDone()
        && deserializingMode && deserializerBatch.size > 0) {
        numRows += deserializerBatch.size;
        batchCounter++;
        oneRootOperator.process(deserializerBatch, 0);
        deserializerBatch.size = 0;
    }
    super.closeOp(abort);
}
/** {@inheritDoc} Delegates to {@link #getOperatorName()}. */
@Override
public String getName() {
    return getOperatorName();
}
/** @return the static display name of this operator */
static public String getOperatorName() {
    return "MAP";
}
/**
 * {@inheritDoc}
 *
 * No OperatorType constant exists for this operator, so null is returned.
 */
@Override
public OperatorType getType() {
    return null;
}
}
|
openjdk/jdk8 | 36,255 | jaxp/src/com/sun/xml/internal/stream/dtd/nonvalidating/DTDGrammar.java | /*
* Copyright (c) 2005, 2006, Oracle and/or its affiliates. All rights reserved.
*/
/*
* Copyright 2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.xml.internal.stream.dtd.nonvalidating;
import java.util.Hashtable;
import java.util.ArrayList;
import java.util.List;
import com.sun.org.apache.xerces.internal.util.SymbolTable;
import com.sun.org.apache.xerces.internal.xni.Augmentations;
import com.sun.org.apache.xerces.internal.xni.QName;
import com.sun.org.apache.xerces.internal.util.XMLSymbols;
import com.sun.org.apache.xerces.internal.xni.XMLLocator;
import com.sun.org.apache.xerces.internal.xni.XMLResourceIdentifier;
import com.sun.org.apache.xerces.internal.xni.XMLString;
import com.sun.org.apache.xerces.internal.xni.XNIException;
import com.sun.org.apache.xerces.internal.xni.parser.XMLDTDContentModelSource;
import com.sun.org.apache.xerces.internal.xni.parser.XMLDTDSource;
/**
* A DTD grammar. This class implements the XNI handler interfaces
* for DTD information so that it can build the approprate validation
* structures automatically from the callbacks.
*
* @author Eric Ye, IBM
* @author Jeffrey Rodriguez, IBM
* @author Andy Clark, IBM
* @author Neil Graham, IBM
*
*/
public class DTDGrammar {
/** Top level scope (-1). */
public static final int TOP_LEVEL_SCOPE = -1;
// private
/** Chunk shift (8): declarations live in chunked 2-D arrays of 2^8 slots. */
private static final int CHUNK_SHIFT = 8; // 2^8 = 256
/** Chunk size (1 << CHUNK_SHIFT). */
private static final int CHUNK_SIZE = (1 << CHUNK_SHIFT);
/** Chunk mask (CHUNK_SIZE - 1): selects the index within a chunk. */
private static final int CHUNK_MASK = CHUNK_SIZE - 1;
/** Initial chunk count (1 << (10 - CHUNK_SHIFT)). */
private static final int INITIAL_CHUNK_COUNT = (1 << (10 - CHUNK_SHIFT)); // 2^10 = 1k
/** Flag bit (0x80) OR-ed into a stored decl type to mark a list type. */
private static final short LIST_FLAG = 0x80;
/** Mask (~LIST_FLAG) that strips the list flag from a stored decl type. */
private static final short LIST_MASK = ~LIST_FLAG;
// debugging
/** Debug DTDGrammar. */
private static final boolean DEBUG = false;
//
// Data
//
// XNI source hooks; not referenced by the code shown in this class body.
protected XMLDTDSource fDTDSource = null;
protected XMLDTDContentModelSource fDTDContentModelSource = null;
/** Current element index. */
protected int fCurrentElementIndex;
/** Current attribute index. */
protected int fCurrentAttributeIndex;
/** fReadingExternalDTD -- presumably set by the driving scanner; not updated here. */
protected boolean fReadingExternalDTD = false;
/** Symbol table. */
private SymbolTable fSymbolTable;
// Recorded XMLNotationDecl entries, in declaration order.
private ArrayList notationDecls = new ArrayList();
// element declarations
/** Number of element declarations. */
private int fElementDeclCount = 0;
/** Element declaration name. */
private QName fElementDeclName[][] = new QName[INITIAL_CHUNK_COUNT][];
/**
 * Element declaration type (-1 means undeclared; the high bit carries
 * LIST_FLAG).
 * @see XMLElementDecl
 */
private short fElementDeclType[][] = new short[INITIAL_CHUNK_COUNT][];
/** First attribute declaration of an element declaration. */
private int fElementDeclFirstAttributeDeclIndex[][] = new int[INITIAL_CHUNK_COUNT][];
/** Last attribute declaration of an element declaration. */
private int fElementDeclLastAttributeDeclIndex[][] = new int[INITIAL_CHUNK_COUNT][];
// attribute declarations
/** Number of attribute declarations. */
private int fAttributeDeclCount = 0 ;
/** Attribute declaration name. */
private QName fAttributeDeclName[][] = new QName[INITIAL_CHUNK_COUNT][];
/**
 * Attribute declaration type (-1 means undeclared; the high bit carries
 * LIST_FLAG).
 * @see XMLAttributeDecl
 */
private short fAttributeDeclType[][] = new short[INITIAL_CHUNK_COUNT][];
/** Attribute declaration enumeration values. */
private String[] fAttributeDeclEnumeration[][] = new String[INITIAL_CHUNK_COUNT][][];
/** Attribute default kind (#FIXED/#IMPLIED/#REQUIRED), per declaration. */
private short fAttributeDeclDefaultType[][] = new short[INITIAL_CHUNK_COUNT][];
/** Normalized attribute default value, per declaration. */
private String fAttributeDeclDefaultValue[][] = new String[INITIAL_CHUNK_COUNT][];
/** Non-normalized attribute default value, per declaration. */
private String fAttributeDeclNonNormalizedDefaultValue[][] = new String[INITIAL_CHUNK_COUNT][];
/** Next attribute index in the owning element's linked attribute list. */
private int fAttributeDeclNextAttributeDeclIndex[][] = new int[INITIAL_CHUNK_COUNT][];
/** Element index mapping table. */
private QNameHashtable fElementIndexMap = new QNameHashtable();
/** Temporary qualified name. */
private QName fQName = new QName();
/** Temporary Attribute decl. */
protected XMLAttributeDecl fAttributeDecl = new XMLAttributeDecl();
/** Element declaration (last one handled by elementDecl()). */
private XMLElementDecl fElementDecl = new XMLElementDecl();
/** Simple type (scratch object reused by attributeDecl()). */
private XMLSimpleType fSimpleType = new XMLSimpleType();
/** table of XMLElementDecl, keyed by raw element name */
Hashtable fElementDeclTab = new Hashtable();
/**
 * Creates a DTD grammar backed by the given symbol table.
 *
 * @param symbolTable symbol table used to intern grammar strings
 */
public DTDGrammar(SymbolTable symbolTable) {
    fSymbolTable = symbolTable;
}
/**
 * Looks up the index of a named attribute declaration in an element
 * declaration's attribute list.
 *
 * @param elementDeclIndex index of the owning element declaration, or -1
 * @param attributeDeclName raw name of the attribute to find
 * @return the attribute declaration index, or -1 if not found
 */
public int getAttributeDeclIndex(int elementDeclIndex, String attributeDeclName) {
    if (elementDeclIndex == -1) {
        return -1;
    }
    int attDefIndex = getFirstAttributeDeclIndex(elementDeclIndex);
    // Walk the element's singly-linked attribute list.
    while (attDefIndex != -1) {
        getAttributeDecl(attDefIndex, fAttributeDecl);
        // Identity check first (names are presumably interned via the symbol
        // table, making == a fast path); equals() covers the rest.
        if (fAttributeDecl.name.rawname == attributeDeclName
            || attributeDeclName.equals(fAttributeDecl.name.rawname) ) {
            return attDefIndex;
        }
        attDefIndex = getNextAttributeDeclIndex(attDefIndex);
    }
    return -1;
}
/**
 * The start of the DTD. This non-validating grammar builds no state at DTD
 * start, so the callback body is intentionally empty.
 *
 * @param locator The document locator, or null if the document
 * location cannot be reported during the parsing of
 * the document DTD. However, it is <em>strongly</em>
 * recommended that a locator be supplied that can
 * at least report the base system identifier of the
 * DTD.
 *
 * @param augs Additional information that may include infoset
 * augmentations.
 * @throws XNIException Thrown by handler to signal an error.
 */
public void startDTD(XMLLocator locator, Augmentations augs) throws XNIException {
} // startDTD(XMLLocator)
// startExternalSubset(Augmentations)
// endExternalSubset(Augmentations)
/**
 * An element declaration. Creates (or completes) the internal element
 * declaration record and derives its content-model type from the raw
 * content-model string.
 *
 * @param name The name of the element.
 * @param contentModel The element content model.
 * @param augs Additional information that may include infoset
 * augmentations.
 * @throws XNIException Thrown by handler to signal an error.
 */
public void elementDecl(String name, String contentModel, Augmentations augs)
    throws XNIException {
    XMLElementDecl tmpElementDecl = (XMLElementDecl) fElementDeclTab.get(name) ;
    if ( tmpElementDecl != null ) {
        if (tmpElementDecl.type == -1) {
            // A placeholder was created earlier (forward reference from an
            // attribute declaration): reuse its existing index.
            fCurrentElementIndex = getElementDeclIndex(name);
        }
        else {
            // duplicate element, ignored: the first declaration is binding.
            return;
        }
    }
    else {
        fCurrentElementIndex = createElementDecl();//create element decl
    }
    XMLElementDecl elementDecl = new XMLElementDecl();
    QName elementName = new QName(null, name, name, null);
    elementDecl.name.setValues(elementName);
    elementDecl.scope= -1;
    // Map the raw content-model string onto the decl type constants.
    if (contentModel.equals("EMPTY")) {
        elementDecl.type = XMLElementDecl.TYPE_EMPTY;
    }
    else if (contentModel.equals("ANY")) {
        elementDecl.type = XMLElementDecl.TYPE_ANY;
    }
    else if (contentModel.startsWith("(") ) {
        // "(#PCDATA ...)" means mixed content; any other parenthesized
        // model is element (children) content.
        if (contentModel.indexOf("#PCDATA") > 0 ) {
            elementDecl.type = XMLElementDecl.TYPE_MIXED;
        }
        else {
            elementDecl.type = XMLElementDecl.TYPE_CHILDREN;
        }
    }
    //add(or set) this elementDecl to the local cache
    this.fElementDeclTab.put(name, elementDecl );
    fElementDecl = elementDecl;
    if ( DEBUG ) {
        System.out.println( "name = " + fElementDecl.name.localpart );
        System.out.println( "Type = " + fElementDecl.type );
    }
    setElementDecl(fCurrentElementIndex, fElementDecl );//set internal structure
    // NOTE(review): this capacity call after setElementDecl looks redundant,
    // since createElementDecl already ensures capacity -- confirm before
    // removing.
    int chunk = fCurrentElementIndex >> CHUNK_SHIFT;
    ensureElementDeclCapacity(chunk);
}
/**
 * An attribute declaration. Records the attribute's type, default kind and
 * default value, creating a placeholder element declaration first when the
 * owning element has not been declared yet.
 *
 * @param elementName The name of the element this attribute belongs to.
 * @param attributeName The name of the attribute.
 * @param type The attribute type. One of: "CDATA", "ENTITY", "ENTITIES",
 *             "ENUMERATION", "ID", "IDREF", "IDREFS", "NMTOKEN",
 *             "NMTOKENS", or "NOTATION".
 * @param enumeration If the type is "ENUMERATION", the allowed attribute
 *                    values; otherwise null.
 * @param defaultType The attribute default type: "#FIXED", "#IMPLIED",
 *                    "#REQUIRED", or null.
 * @param defaultValue The attribute default value, or null if none.
 * @param nonNormalizedDefaultValue The default value with no normalization
 *                                  performed, or null if none.
 * @param augs Additional information that may include infoset augmentations.
 * @throws XNIException Thrown by handler to signal an error.
 */
public void attributeDecl(String elementName, String attributeName,
        String type, String[] enumeration,
        String defaultType, XMLString defaultValue,
        XMLString nonNormalizedDefaultValue, Augmentations augs) throws XNIException {
    // Default values of non-CDATA attributes are whitespace-normalized.
    // (Type strings are interned symbols, so identity comparison is intended.)
    if (type != XMLSymbols.fCDATASymbol && defaultValue != null) {
        normalizeDefaultAttrValue(defaultValue);
    }
    if (!this.fElementDeclTab.containsKey(elementName)) {
        // Forward reference to an element decl: create a placeholder
        // element declaration first.
        fCurrentElementIndex = createElementDecl();
        XMLElementDecl elementDecl = new XMLElementDecl();
        elementDecl.name.setValues(null, elementName, elementName, null);
        elementDecl.scope = -1;
        // add(or set) this elementDecl to the local cache
        this.fElementDeclTab.put(elementName, elementDecl);
        // set internal structure
        setElementDecl(fCurrentElementIndex, elementDecl);
    }
    // Get Grammar index to grammar array
    int elementIndex = getElementDeclIndex(elementName);
    // When more than one definition is provided for the same attribute of a
    // given element type, only the first declaration is binding; later
    // declarations are ignored.
    if (getAttributeDeclIndex(elementIndex, attributeName) != -1) {
        return;
    }
    fCurrentAttributeIndex = createAttributeDecl(); // Create current Attribute Decl
    fSimpleType.clear();
    if (defaultType != null) {
        if (defaultType.equals("#FIXED")) {
            fSimpleType.defaultType = fSimpleType.DEFAULT_TYPE_FIXED;
        } else if (defaultType.equals("#IMPLIED")) {
            fSimpleType.defaultType = fSimpleType.DEFAULT_TYPE_IMPLIED;
        } else if (defaultType.equals("#REQUIRED")) {
            fSimpleType.defaultType = fSimpleType.DEFAULT_TYPE_REQUIRED;
        }
    }
    if (DEBUG) {
        // Fix: defaultValue may legitimately be null (#IMPLIED/#REQUIRED);
        // the old code dereferenced it unconditionally and NPE'd in debug runs.
        System.out.println("defaultvalue = "
            + (defaultValue != null ? defaultValue.toString() : null));
    }
    fSimpleType.defaultValue = defaultValue != null ? defaultValue.toString() : null;
    fSimpleType.nonNormalizedDefaultValue =
        nonNormalizedDefaultValue != null ? nonNormalizedDefaultValue.toString() : null;
    fSimpleType.enumeration = enumeration;
    if (type.equals("CDATA")) {
        fSimpleType.type = XMLSimpleType.TYPE_CDATA;
    } else if (type.equals("ID")) {
        fSimpleType.type = XMLSimpleType.TYPE_ID;
    } else if (type.startsWith("IDREF")) {
        fSimpleType.type = XMLSimpleType.TYPE_IDREF;
        // "IDREFS" is the list form of IDREF.
        if (type.indexOf("S") > 0) {
            fSimpleType.list = true;
        }
    } else if (type.equals("ENTITIES")) {
        fSimpleType.type = XMLSimpleType.TYPE_ENTITY;
        fSimpleType.list = true;
    } else if (type.equals("ENTITY")) {
        fSimpleType.type = XMLSimpleType.TYPE_ENTITY;
    } else if (type.equals("NMTOKENS")) {
        fSimpleType.type = XMLSimpleType.TYPE_NMTOKEN;
        fSimpleType.list = true;
    } else if (type.equals("NMTOKEN")) {
        fSimpleType.type = XMLSimpleType.TYPE_NMTOKEN;
    } else if (type.startsWith("NOTATION")) {
        fSimpleType.type = XMLSimpleType.TYPE_NOTATION;
    } else if (type.startsWith("ENUMERATION")) {
        fSimpleType.type = XMLSimpleType.TYPE_ENUMERATION;
    } else {
        // REVISIT: Report error message. -Ac
        System.err.println("!!! unknown attribute type "+type);
    }
    // REVISIT: The datatype should be stored with the attribute value
    // and not special-cased in the XMLValidator. -Ac
    fQName.setValues(null, attributeName, attributeName, null);
    fAttributeDecl.setValues( fQName, fSimpleType, false );
    setAttributeDecl(elementIndex, fCurrentAttributeIndex, fAttributeDecl);
    int chunk = fCurrentAttributeIndex >> CHUNK_SHIFT;
    ensureAttributeDeclCapacity(chunk);
} // attributeDecl(String,String,String,String[],String,XMLString,XMLString, Augmentations)
/** @return the symbol table backing this grammar */
public SymbolTable getSymbolTable() {
    return fSymbolTable;
}
/**
 * Returns the index of the first element declaration, or -1 when the
 * grammar contains no element declarations. This index is then used to
 * query more information about the element declaration.
 *
 * Fix: the previous test ({@code fElementDeclCount >= 0}) was always true
 * because the count can never go negative, so an empty grammar wrongly
 * reported 0 as a valid first index.
 *
 * @see #getNextElementDeclIndex
 * @see #getElementDecl
 */
public int getFirstElementDeclIndex() {
    return fElementDeclCount > 0 ? 0 : -1;
} // getFirstElementDeclIndex():int
/**
 * Returns the index of the element declaration that follows the given one,
 * or -1 when the given declaration is the last.
 *
 * @param elementDeclIndex The element declaration index.
 */
public int getNextElementDeclIndex(int elementDeclIndex) {
    if (elementDeclIndex < fElementDeclCount - 1) {
        return elementDeclIndex + 1;
    }
    return -1;
} // getNextElementDeclIndex(int):int
/**
 * Returns the index registered for the given element name.
 *
 * @param elementDeclName raw element name
 * @return index of the elementDeclName in scope, or -1 if unknown
 */
public int getElementDeclIndex(String elementDeclName) {
    return fElementIndexMap.get(elementDeclName);
} // getElementDeclIndex(String):int
/**
 * Returns the element decl index for a qualified name (looked up by its
 * raw name).
 *
 * @param elementDeclQName qualified name of the element
 */
public int getElementDeclIndex(QName elementDeclQName) {
    return getElementDeclIndex(elementDeclQName.rawname);
} // getElementDeclIndex(QName):int
/**
 * Returns only the content-spec type of an element declaration, avoiding
 * the cost of populating a full XMLElementDecl.
 *
 * @param elementIndex element declaration index
 * @return the content-spec type, or -1 when the index is out of range or
 *         the element is undeclared
 */
public short getContentSpecType(int elementIndex){
    if (elementIndex < 0 || elementIndex >= fElementDeclCount) {
        return -1 ;
    }
    final int chunk = elementIndex >> CHUNK_SHIFT;
    final int index = elementIndex & CHUNK_MASK;
    final short storedType = fElementDeclType[chunk][index];
    // -1 marks an undeclared placeholder; otherwise strip the list flag bit.
    return storedType == -1 ? -1 : (short) (storedType & LIST_MASK);
}
/**
 * Fills in the given XMLElementDecl from the declaration stored at the
 * given index.
 *
 * @param elementDeclIndex index of the element declaration
 * @param elementDecl out-parameter populated by this call
 * @return true if the index was valid and elementDecl was filled in
 */
public boolean getElementDecl(int elementDeclIndex,
                              XMLElementDecl elementDecl) {
    if (elementDeclIndex < 0 || elementDeclIndex >= fElementDeclCount) {
        return false;
    }
    final int chunk = elementDeclIndex >> CHUNK_SHIFT;
    final int index = elementDeclIndex & CHUNK_MASK;
    elementDecl.name.setValues(fElementDeclName[chunk][index]);
    final short storedType = fElementDeclType[chunk][index];
    if (storedType == -1) {
        elementDecl.type = -1;
        elementDecl.simpleType.list = false;
    } else {
        // The list flag is packed into the high bit of the stored type.
        elementDecl.type = (short) (storedType & LIST_MASK);
        elementDecl.simpleType.list = (storedType & LIST_FLAG) != 0;
    }
    elementDecl.simpleType.defaultType = -1;
    elementDecl.simpleType.defaultValue = null;
    return true;
}
// REVISIT: Make this getAttributeDeclCount/getAttributeDeclAt. -Ac
/**
 * Returns the index of the first attribute declaration of an element
 * declaration, or -1 when the element index is out of range or the element
 * has no attributes.
 *
 * Fix: an out-of-range index (e.g. -1 from a failed name lookup) previously
 * caused an ArrayIndexOutOfBoundsException instead of returning -1.
 *
 * @param elementDeclIndex element declaration index
 */
public int getFirstAttributeDeclIndex(int elementDeclIndex) {
    if (elementDeclIndex < 0 || elementDeclIndex >= fElementDeclCount) {
        return -1;
    }
    int chunk = elementDeclIndex >> CHUNK_SHIFT;
    int index = elementDeclIndex & CHUNK_MASK;
    return fElementDeclFirstAttributeDeclIndex[chunk][index];
} // getFirstAttributeDeclIndex
/**
 * Returns the index of the attribute declaration that follows the given
 * one in its element's attribute list, or -1 at the end of the list.
 *
 * @param attributeDeclIndex attribute declaration index
 */
public int getNextAttributeDeclIndex(int attributeDeclIndex) {
    final int chunk = attributeDeclIndex >> CHUNK_SHIFT;
    final int index = attributeDeclIndex & CHUNK_MASK;
    return fAttributeDeclNextAttributeDeclIndex[chunk][index];
}
/**
 * Fills in the given XMLAttributeDecl from the declaration stored at the
 * given index.
 *
 * @param attributeDeclIndex attribute declaration index
 * @param attributeDecl out-parameter populated by this call
 * @return true if the index was valid and attributeDecl was filled in
 */
public boolean getAttributeDecl(int attributeDeclIndex, XMLAttributeDecl attributeDecl) {
    if (attributeDeclIndex < 0 || attributeDeclIndex >= fAttributeDeclCount) {
        return false;
    }
    final int chunk = attributeDeclIndex >> CHUNK_SHIFT;
    final int index = attributeDeclIndex & CHUNK_MASK;
    attributeDecl.name.setValues(fAttributeDeclName[chunk][index]);
    final short storedType = fAttributeDeclType[chunk][index];
    final short attributeType;
    final boolean isList;
    if (storedType == -1) {
        attributeType = -1;
        isList = false;
    } else {
        // The list flag is packed into the high bit of the stored type.
        attributeType = (short) (storedType & LIST_MASK);
        isList = (storedType & LIST_FLAG) != 0;
    }
    attributeDecl.simpleType.setValues(attributeType, fAttributeDeclName[chunk][index].localpart,
        fAttributeDeclEnumeration[chunk][index],
        isList, fAttributeDeclDefaultType[chunk][index],
        fAttributeDeclDefaultValue[chunk][index],
        fAttributeDeclNonNormalizedDefaultValue[chunk][index]);
    return true;
} // getAttributeDecl
/**
 * Returns whether the given attribute is of type CDATA or not.
 * Unknown elements/attributes default to true (CDATA is the default type).
 *
 * Fix: the element declaration index was previously passed straight to
 * {@link #getAttributeDecl(int, XMLAttributeDecl)}, which expects an
 * <em>attribute</em> declaration index, so an unrelated table slot was
 * consulted. The attribute's own index is now resolved first.
 *
 * @param elName The element name.
 * @param atName The attribute name.
 *
 * @return true if the attribute is of type CDATA
 */
public boolean isCDATAAttribute(QName elName, QName atName) {
    int elDeclIdx = getElementDeclIndex(elName);
    int atDeclIdx = getAttributeDeclIndex(elDeclIdx, atName.rawname);
    if (getAttributeDecl(atDeclIdx, fAttributeDecl)
        && fAttributeDecl.simpleType.type != XMLSimpleType.TYPE_CDATA){
        return false;
    }
    return true;
}
/** Prints all element declarations to System.out (debugging aid). */
public void printElements( ) {
    final XMLElementDecl elementDecl = new XMLElementDecl();
    for (int i = 0; getElementDecl(i, elementDecl); i++) {
        System.out.println("element decl: "+elementDecl.name+
            ", "+ elementDecl.name.rawname );
    }
}
/** Prints one element's attribute list to System.out (debugging aid). */
public void printAttributes(int elementDeclIndex) {
    System.out.print(elementDeclIndex);
    System.out.print(" [");
    int attributeDeclIndex = getFirstAttributeDeclIndex(elementDeclIndex);
    while (attributeDeclIndex != -1) {
        System.out.print(' ');
        System.out.print(attributeDeclIndex);
        printAttribute(attributeDeclIndex);
        attributeDeclIndex = getNextAttributeDeclIndex(attributeDeclIndex);
        // Separate entries with a comma, but not after the last one.
        if (attributeDeclIndex != -1) {
            System.out.print(",");
        }
    }
    System.out.println(" ]");
}
/**
 * Allocates a new element declaration slot, initialized to "undeclared"
 * with an empty attribute list.
 *
 * @return the index of the newly created element declaration
 */
protected int createElementDecl() {
    final int chunk = fElementDeclCount >> CHUNK_SHIFT;
    final int index = fElementDeclCount & CHUNK_MASK;
    ensureElementDeclCapacity(chunk);
    fElementDeclName[chunk][index] = new QName();
    fElementDeclType[chunk][index] = -1;
    fElementDeclFirstAttributeDeclIndex[chunk][index] = -1;
    fElementDeclLastAttributeDeclIndex[chunk][index] = -1;
    return fElementDeclCount++;
}
/**
 * Copies the given element declaration into the grammar tables at the given
 * index and registers its name in the element index map. Out-of-range
 * indices are ignored. (Also removed an unused local copy of
 * {@code elementDecl.scope}.)
 *
 * @param elementDeclIndex target element declaration index
 * @param elementDecl source declaration to copy
 */
protected void setElementDecl(int elementDeclIndex, XMLElementDecl elementDecl) {
    if (elementDeclIndex < 0 || elementDeclIndex >= fElementDeclCount) {
        return;
    }
    int chunk = elementDeclIndex >> CHUNK_SHIFT;
    int index = elementDeclIndex & CHUNK_MASK;
    fElementDeclName[chunk][index].setValues(elementDecl.name);
    fElementDeclType[chunk][index] = elementDecl.type;
    if (elementDecl.simpleType.list == true ) {
        // Pack the list flag into the high bit of the stored type.
        fElementDeclType[chunk][index] |= LIST_FLAG;
    }
    fElementIndexMap.put(elementDecl.name.rawname, elementDeclIndex);
}
/**
 * Sets the head of an element declaration's attribute list. Out-of-range
 * element indices are ignored.
 */
protected void setFirstAttributeDeclIndex(int elementDeclIndex, int newFirstAttrIndex){
    if (elementDeclIndex >= 0 && elementDeclIndex < fElementDeclCount) {
        final int chunk = elementDeclIndex >> CHUNK_SHIFT;
        final int index = elementDeclIndex & CHUNK_MASK;
        fElementDeclFirstAttributeDeclIndex[chunk][index] = newFirstAttrIndex;
    }
}
/**
 * Allocates a new attribute declaration slot, initialized to "undeclared"
 * (type -1, #IMPLIED default, not yet linked into any attribute list).
 *
 * @return the index of the newly created attribute declaration
 */
protected int createAttributeDecl() {
    final int chunk = fAttributeDeclCount >> CHUNK_SHIFT;
    final int index = fAttributeDeclCount & CHUNK_MASK;
    ensureAttributeDeclCapacity(chunk);
    fAttributeDeclName[chunk][index] = new QName();
    fAttributeDeclType[chunk][index] = -1;
    fAttributeDeclEnumeration[chunk][index] = null;
    fAttributeDeclDefaultType[chunk][index] = XMLSimpleType.DEFAULT_TYPE_IMPLIED;
    fAttributeDeclDefaultValue[chunk][index] = null;
    fAttributeDeclNonNormalizedDefaultValue[chunk][index] = null;
    fAttributeDeclNextAttributeDeclIndex[chunk][index] = -1;
    return fAttributeDeclCount++;
}
/**
 * Copies the attribute declaration into the grammar tables and appends it
 * to its element's singly-linked attribute list (unless it is already
 * linked).
 *
 * @param elementDeclIndex index of the owning element declaration
 * @param attributeDeclIndex index of the attribute declaration slot
 * @param attributeDecl source declaration to copy
 */
protected void setAttributeDecl(int elementDeclIndex, int attributeDeclIndex,
                                XMLAttributeDecl attributeDecl) {
    int attrChunk = attributeDeclIndex >> CHUNK_SHIFT;
    int attrIndex = attributeDeclIndex & CHUNK_MASK;
    fAttributeDeclName[attrChunk][attrIndex].setValues(attributeDecl.name);
    fAttributeDeclType[attrChunk][attrIndex] = attributeDecl.simpleType.type;
    if (attributeDecl.simpleType.list) {
        // Pack the list flag into the high bit of the stored type.
        fAttributeDeclType[attrChunk][attrIndex] |= LIST_FLAG;
    }
    fAttributeDeclEnumeration[attrChunk][attrIndex] = attributeDecl.simpleType.enumeration;
    fAttributeDeclDefaultType[attrChunk][attrIndex] = attributeDecl.simpleType.defaultType;
    fAttributeDeclDefaultValue[attrChunk][attrIndex] = attributeDecl.simpleType.defaultValue;
    fAttributeDeclNonNormalizedDefaultValue[attrChunk][attrIndex] = attributeDecl.simpleType.nonNormalizedDefaultValue;
    int elemChunk = elementDeclIndex >> CHUNK_SHIFT;
    int elemIndex = elementDeclIndex & CHUNK_MASK;
    // Walk the element's attribute list to see whether this index is
    // already linked (index ends as attributeDeclIndex if found, -1 if not).
    int index = fElementDeclFirstAttributeDeclIndex[elemChunk][elemIndex];
    while (index != -1) {
        if (index == attributeDeclIndex) {
            break;
        }
        attrChunk = index >> CHUNK_SHIFT;
        attrIndex = index & CHUNK_MASK;
        index = fAttributeDeclNextAttributeDeclIndex[attrChunk][attrIndex];
    }
    if (index == -1) {
        // Not linked yet: append at the tail of the element's attribute list
        // and update the tail pointer.
        if (fElementDeclFirstAttributeDeclIndex[elemChunk][elemIndex] == -1) {
            fElementDeclFirstAttributeDeclIndex[elemChunk][elemIndex] = attributeDeclIndex;
        } else {
            index = fElementDeclLastAttributeDeclIndex[elemChunk][elemIndex];
            attrChunk = index >> CHUNK_SHIFT;
            attrIndex = index & CHUNK_MASK;
            fAttributeDeclNextAttributeDeclIndex[attrChunk][attrIndex] = attributeDeclIndex;
        }
        fElementDeclLastAttributeDeclIndex[elemChunk][elemIndex] = attributeDeclIndex;
    }
}
/**
 * Records a notation declaration.
 *
 * @param name notation name
 * @param identifier public/literal-system/base identifiers of the notation
 * @param augs additional information (unused here)
 * @throws XNIException thrown by handlers to signal an error
 */
public void notationDecl(String name, XMLResourceIdentifier identifier,
                         Augmentations augs) throws XNIException {
    final XMLNotationDecl decl = new XMLNotationDecl();
    decl.setValues(name, identifier.getPublicId(), identifier.getLiteralSystemId(),
        identifier.getBaseSystemId());
    notationDecls.add(decl);
}
/**
 * Returns the notation declarations recorded so far.
 * NOTE(review): this exposes the internal live list, so callers can mutate
 * grammar state -- consider returning an unmodifiable view.
 */
public List getNotationDecls(){
    return notationDecls;
}
//
// Private methods
//
/** Prints one attribute declaration's local name (debugging aid). */
private void printAttribute(int attributeDeclIndex) {
    final XMLAttributeDecl decl = new XMLAttributeDecl();
    if (getAttributeDecl(attributeDeclIndex, decl)) {
        System.out.print(" { ");
        System.out.print(decl.name.localpart);
        System.out.print(" }");
    }
} // printAttribute(int)
/** Grows the element declaration tables so that the given chunk exists. */
private void ensureElementDeclCapacity(int chunk) {
    if (chunk >= fElementDeclName.length) {
        // Double the number of chunk slots in every parallel table.
        fElementDeclName = resize(fElementDeclName, fElementDeclName.length * 2);
        fElementDeclType = resize(fElementDeclType, fElementDeclType.length * 2);
        fElementDeclFirstAttributeDeclIndex = resize(fElementDeclFirstAttributeDeclIndex, fElementDeclFirstAttributeDeclIndex.length * 2);
        fElementDeclLastAttributeDeclIndex = resize(fElementDeclLastAttributeDeclIndex, fElementDeclLastAttributeDeclIndex.length * 2);
    } else if (fElementDeclName[chunk] != null) {
        // Chunk already allocated; nothing to do.
        return;
    }
    // Allocate the chunk arrays themselves.
    fElementDeclName[chunk] = new QName[CHUNK_SIZE];
    fElementDeclType[chunk] = new short[CHUNK_SIZE];
    fElementDeclFirstAttributeDeclIndex[chunk] = new int[CHUNK_SIZE];
    fElementDeclLastAttributeDeclIndex[chunk] = new int[CHUNK_SIZE];
}
/** Grows the attribute declaration tables so that the given chunk exists. */
private void ensureAttributeDeclCapacity(int chunk) {
    if (chunk >= fAttributeDeclName.length) {
        // Double the number of chunk slots in every parallel table.
        fAttributeDeclName = resize(fAttributeDeclName, fAttributeDeclName.length * 2);
        fAttributeDeclType = resize(fAttributeDeclType, fAttributeDeclType.length * 2);
        fAttributeDeclEnumeration = resize(fAttributeDeclEnumeration, fAttributeDeclEnumeration.length * 2);
        fAttributeDeclDefaultType = resize(fAttributeDeclDefaultType, fAttributeDeclDefaultType.length * 2);
        fAttributeDeclDefaultValue = resize(fAttributeDeclDefaultValue, fAttributeDeclDefaultValue.length * 2);
        fAttributeDeclNonNormalizedDefaultValue = resize(fAttributeDeclNonNormalizedDefaultValue, fAttributeDeclNonNormalizedDefaultValue.length * 2);
        fAttributeDeclNextAttributeDeclIndex = resize(fAttributeDeclNextAttributeDeclIndex, fAttributeDeclNextAttributeDeclIndex.length * 2);
    } else if (fAttributeDeclName[chunk] != null) {
        // Chunk already allocated; nothing to do.
        return;
    }
    // Allocate the chunk arrays themselves.
    fAttributeDeclName[chunk] = new QName[CHUNK_SIZE];
    fAttributeDeclType[chunk] = new short[CHUNK_SIZE];
    fAttributeDeclEnumeration[chunk] = new String[CHUNK_SIZE][];
    fAttributeDeclDefaultType[chunk] = new short[CHUNK_SIZE];
    fAttributeDeclDefaultValue[chunk] = new String[CHUNK_SIZE];
    fAttributeDeclNonNormalizedDefaultValue[chunk] = new String[CHUNK_SIZE];
    fAttributeDeclNextAttributeDeclIndex[chunk] = new int[CHUNK_SIZE];
}
//
// Chunk-array resize helpers: allocate a larger outer array and copy the
// existing chunk references over (inner chunks are shared, not copied).
//
private static short[][] resize(short array[][], int newsize) {
    final short[][] grown = new short[newsize][];
    System.arraycopy(array, 0, grown, 0, array.length);
    return grown;
}
private static int[][] resize(int array[][], int newsize) {
    final int[][] grown = new int[newsize][];
    System.arraycopy(array, 0, grown, 0, array.length);
    return grown;
}
private static QName[][] resize(QName array[][], int newsize) {
    final QName[][] grown = new QName[newsize][];
    System.arraycopy(array, 0, grown, 0, array.length);
    return grown;
}
private static String[][] resize(String array[][], int newsize) {
    final String[][] grown = new String[newsize][];
    System.arraycopy(array, 0, grown, 0, array.length);
    return grown;
}
private static String[][][] resize(String array[][][], int newsize) {
    final String[][][] grown = new String[newsize][][];
    System.arraycopy(array, 0, grown, 0, array.length);
    return grown;
}
    //
    // Classes
    //
    /**
     * A simple hashtable mapping a String key to an int value.
     *
     * Storage layout: each of the {@link #HASHTABLE_SIZE} slots holds an
     * Object[] "bucket" in which element 0 is a one-element int[] holding the
     * pair count, followed by alternating (String key, int[1] boxed value)
     * pairs.  Keys are compared with {@code ==}, so callers must supply
     * interned/unique String instances (see {@link #UNIQUE_STRINGS}).
     *
     * @author Eric Ye, IBM
     * @author Andy Clark, IBM
     */
    protected static final class QNameHashtable {
        //
        // Constants
        //
        // Flags the interning assumption that makes the identity (==) key
        // comparison below valid.
        public static final boolean UNIQUE_STRINGS = true;
        /** Initial bucket size (4). */
        private static final int INITIAL_BUCKET_SIZE = 4;
        // NOTE: Changed previous hashtable size from 512 to 101 so
        // that we get a better distribution for hashing. -Ac
        /** Hashtable size (101). */
        private static final int HASHTABLE_SIZE = 101;
        //
        // Data
        //
        // Slot array; each non-null entry is a bucket laid out as described
        // in the class comment.  Buckets are created lazily in put().
        private Object[][] fHashTable = new Object[HASHTABLE_SIZE][];
        //
        // Public methods
        //
        /**
         * Associates the given value with the specified key, creating or
         * growing the bucket as needed.  An existing entry for the same
         * (identical) key is overwritten in place.
         */
        public void put(String key, int value) {
            // REVISIT: Why +2? -Ac
            int hash = (hash(key)+2) % HASHTABLE_SIZE;
            Object[] bucket = fHashTable[hash];
            if (bucket == null) {
                // first entry for this slot: bucket = [count=1, key, value]
                bucket = new Object[1 + 2*INITIAL_BUCKET_SIZE];
                bucket[0] = new int[]{1};
                bucket[1] = key;
                bucket[2] = new int[]{value};
                fHashTable[hash] = bucket;
            } else {
                int count = ((int[])bucket[0])[0];
                int offset = 1 + 2*count;
                if (offset == bucket.length) {
                    // bucket full: grow by INITIAL_BUCKET_SIZE pairs
                    int newSize = count + INITIAL_BUCKET_SIZE;
                    Object[] newBucket = new Object[1 + 2*newSize];
                    System.arraycopy(bucket, 0, newBucket, 0, offset);
                    bucket = newBucket;
                    fHashTable[hash] = bucket;
                }
                boolean found = false;
                int j=1;
                for (int i=0; i<count; i++){
                    // identity comparison - relies on interned keys
                    if ((String)bucket[j] == key) {
                        ((int[])bucket[j+1])[0] = value;
                        found = true;
                        break;
                    }
                    j += 2;
                }
                if (! found) {
                    bucket[offset++] = key;
                    bucket[offset]= new int[]{value};
                    ((int[])bucket[0])[0] = ++count;
                }
            }
            //System.out.println("put("+key+" -> "+value+')');
            //System.out.println("get("+key+") -> "+get(key));
        } // put(String,int)
        /**
         * Returns the value associated with the specified key, or -1 if the
         * key (by identity) is not present.
         */
        public int get(String key) {
            int hash = (hash(key)+2) % HASHTABLE_SIZE;
            Object[] bucket = fHashTable[hash];
            if (bucket == null) {
                return -1;
            }
            int count = ((int[])bucket[0])[0];
            int j=1;
            for (int i=0; i<count; i++){
                // identity comparison - relies on interned keys
                if ((String)bucket[j] == key) {
                    return ((int[])bucket[j+1])[0];
                }
                j += 2;
            }
            return -1;
        } // get(String):int
        //
        // Protected methods
        //
        /**
         * Returns a non-negative hash value for the specified symbol.
         * A null symbol hashes to 0.
         */
        protected int hash(String symbol) {
            if (symbol == null) {
                return 0;
            }
            int code = 0;
            int length = symbol.length();
            for (int i = 0; i < length; i++) {
                code = code * 37 + symbol.charAt(i);
            }
            // NOTE(review): masks to the low 27 bits (0x7FFFFFF); possibly
            // intended to be 0x7FFFFFFF - harmless either way since the
            // result stays non-negative, but confirm before changing.
            return code & 0x7FFFFFF;
        } // hash(String):int
    } // class QNameHashtable
/**
* Normalize the attribute value of a non CDATA default attribute
* collapsing sequences of space characters (x20)
*
* @param value The value to normalize
* @return Whether the value was changed or not.
*/
private boolean normalizeDefaultAttrValue(XMLString value) {
int oldLength = value.length;
boolean skipSpace = true; // skip leading spaces
int current = value.offset;
int end = value.offset + value.length;
for (int i = value.offset; i < end; i++) {
if (value.ch[i] == ' ') {
if (!skipSpace) {
// take the first whitespace as a space and skip the others
value.ch[current++] = ' ';
skipSpace = true;
}
else {
// just skip it.
}
}
else {
// simply shift non space chars if needed
if (current != i) {
value.ch[current] = value.ch[i];
}
current++;
skipSpace = false;
}
}
if (current != end) {
if (skipSpace) {
// if we finished on a space trim it
current--;
}
// set the new value length
value.length = current - value.offset;
return true;
}
return false;
}
    /**
     * The end of the DTD.  Implemented to satisfy the handler contract;
     * no cleanup is required here when the DTD ends.
     *
     * @param augs additional information that may include infoset augmentations
     *
     * @throws XNIException declared by the handler contract; never thrown here
     */
    public void endDTD(Augmentations augs) throws XNIException {
        // Intentionally empty: nothing to do at the end of the DTD.
    }
}
|
oracle/coherence | 36,027 | prj/coherence-core/src/main/java/com/tangosol/coherence/dslquery/internal/PersistenceToolsHelper.java | /*
* Copyright (c) 2000, 2023, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* https://oss.oracle.com/licenses/upl.
*/
package com.tangosol.coherence.dslquery.internal;
import com.oracle.coherence.common.base.Blocking;
import com.oracle.coherence.persistence.PersistenceException;
import com.tangosol.coherence.dslquery.CohQLException;
import com.tangosol.coherence.dslquery.ExecutionContext;
import com.tangosol.coherence.dsltools.precedence.OPScanner;
import com.tangosol.coherence.dsltools.termtrees.AtomicTerm;
import com.tangosol.coherence.dsltools.termtrees.Term;
import com.tangosol.coherence.dsltools.termtrees.Terms;
import com.tangosol.io.FileHelper;
import com.tangosol.net.CacheFactory;
import com.tangosol.net.Cluster;
import com.tangosol.net.ConfigurableCacheFactory;
import com.tangosol.net.DistributedCacheService;
import com.tangosol.net.ExtensibleConfigurableCacheFactory;
import com.tangosol.net.Member;
import com.tangosol.net.Service;
import com.tangosol.net.management.MBeanServerProxy;
import com.tangosol.net.management.Registry;
import com.tangosol.persistence.CachePersistenceHelper;
import com.tangosol.persistence.PersistenceEnvironmentInfo;
import com.tangosol.util.Base;
import com.tangosol.util.Filters;
import com.tangosol.util.WrapperException;
import java.io.File;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import javax.management.MBeanException;
import static com.tangosol.persistence.CachePersistenceHelper.getMBeanName;
/**
* Various helper classes to support calling Persistence operations
* from within CohQL.
*
* @author tam 2014.02.14
* @since 12.2.1
*/
public class PersistenceToolsHelper
{
// ----- constructors ---------------------------------------------------
    /**
     * Construct a new PersistenceToolsHelper which can be used to issue
     * persistence related commands from CohQL. No-args constructor is used
     * when no tracing is required.
     *
     * @throws CohQLException if the cluster management Registry is unavailable
     */
    public PersistenceToolsHelper()
        {
        this(null);
        }
    /**
     * Construct a new PersistenceToolsHelper which can be used to issue
     * persistence related commands from CohQL.
     * <p>
     * Note: this constructor joins (or starts) the cluster via
     * {@link CacheFactory#ensureCluster()} and blocks until the Cluster
     * MBean has been registered (see ensureMBeanRegistration).
     *
     * @param out  the PrintWriter to write trace messages to; may be null,
     *             in which case tracing is disabled
     *
     * @throws CohQLException if the management Registry cannot be obtained
     */
    public PersistenceToolsHelper(PrintWriter out)
        {
        Cluster cluster = CacheFactory.ensureCluster();
        m_registry = cluster.getManagement();
        m_out = out;
        if (m_registry == null)
            {
            throw new CohQLException("Unable to retrieve Registry from cluster");
            }
        m_mbsProxy = m_registry.getMBeanServerProxy();
        ensureMBeanRegistration(Registry.CLUSTER_TYPE);
        }
// ----- PersistenceToolsHelper methods----------------------------------
/**
* Ensure a {@link PersistenceToolsHelper} exists within the CohQL {@link ExecutionContext}
* which can be used to issue cluster related Persistence commands.
* If it doesn't, then create a new one.
*
* @param ctx current CohQL {@link ExecutionContext}
*
* @return the existing PersistenceToolsHelper or a new one if doesn't exist
*
* @throws CohQLException if we are unable to retrieve a new API
*/
public static PersistenceToolsHelper ensurePersistenceToolsHelper(ExecutionContext ctx)
throws CohQLException
{
PersistenceToolsHelper helper = ctx.getResourceRegistry().getResource(PersistenceToolsHelper.class, HELPER);
try
{
if (helper == null)
{
helper = new PersistenceToolsHelper(ctx.isTraceEnabled() ? ctx.getWriter() : null);
ctx.getResourceRegistry().registerResource(PersistenceToolsHelper.class, HELPER, helper);
}
}
catch (Exception e)
{
throw ensureCohQLException(e, "Unable to instantiate PersistenceToolsHelper");
}
return helper;
}
/**
* Issue an operation and wait for the operation to be complete by
* polling the "Idle" attribute of the PersistenceCoordinator for the service.
* This method will poll continuously until an "Idle" status has been reached
* or until timeout set by a calling thread has been raised. e.g.<br>
* <pre>
* try (Timeout t = Timeout.after(120, TimeUnit.SECONDS))
* {
* helper.invokeOperationWithWait("createSnapshot", "snapshot", "Service");
* }
* </pre>
* When called from CohQL, the TIMEOUT value set in CohQL will be used to interrupt
* the operation if it has not completed. <br>
* Note: Even though and exception is raised, the MBean operation will still
* execute to completion, but CohQL will return immediately without waiting.
*
* @param sOperation the operation to execute
* @param sSnapshot the snapshot name
* @param sServiceName the name of the service to execute operation on
*
* @throws MBeanException if any MBean related errors
*/
public void invokeOperationWithWait(String sOperation, String sSnapshot, String sServiceName)
throws MBeanException
{
boolean fisIdle;
try
{
invokeOperation(sOperation, sServiceName, new String[] {sSnapshot}, new String[] {"java.lang.String"});
String sBeanName = getPersistenceMBean(sServiceName);
// COH-20778 wait a bit longer to avoid false positive of "Idle" check on window
Blocking.sleep(SLEEP_TIME);
while (true)
{
Blocking.sleep(SLEEP_TIME);
fisIdle = (boolean) getAttribute(sBeanName, "Idle");
traceMessage("Idle = " + fisIdle);
if (fisIdle)
{
// idle means the operation has completed as we are guaranteed an up-to-date
// attribute value just after an operation was called
return;
}
traceMessage("Operation " + sOperation + " not yet complete, waiting "
+ SLEEP_TIME + "ms");
}
}
catch (Exception e)
{
throw Base.ensureRuntimeException(e, "Unable to complete operation " +
sOperation + " for service " + sServiceName);
}
}
/**
* Invoke an operation against a PersistenceManagerMBean associated to the
* given service name.
*
* @param sOperation the operation to execute
* @param sServiceName the name of the service to execute operation on
* @param aoParams the parameters of the operation
* @param asParamTypes the parameter types of the operation
*
* @throws MBeanException if an error occurred invoking the MBean
*/
public void invokeOperation(String sOperation, String sServiceName, Object[] aoParams, String[] asParamTypes)
throws MBeanException
{
String sBeanName = getPersistenceMBean(sServiceName);
traceMessage("Invoking " + sOperation + " on " + sBeanName +
" using params = " + Arrays.toString(aoParams));
m_mbsProxy.invoke(sBeanName, sOperation, aoParams, asParamTypes);
}
/**
* Validate that a service name exists for the current cluster.
*
* @param sServiceName the service name to check
*
* @return true if the service exists
*/
public boolean serviceExists(String sServiceName)
{
try
{
Map<String, String[]> mapServices = listServices();
return mapServices != null && mapServices.containsKey(sServiceName);
}
catch (Exception e)
{
throw Base.ensureRuntimeException(e, "Error validating service");
}
}
/**
* Validate that a snapshot exists for a given service.
*
* @param sServiceName the service name to check
* @param sSnapshotName the snapshot name to check
*
* @return true if the snapshot exists for the service
*/
public boolean snapshotExists(String sServiceName, String sSnapshotName)
{
try
{
String[] asSnapshots = listSnapshots(sServiceName);
return asSnapshots != null && Arrays.asList(asSnapshots).contains(sSnapshotName);
}
catch (Exception e)
{
throw Base.ensureRuntimeException(e, "Error validating snapshot");
}
}
/**
* Validate that an archived snapshot exists for a given service.
*
* @param sServiceName the service name to check
* @param sSnapshotName the archived snapshot name to check
*
* @return true if the archived snapshot exists for the service
*/
public boolean archivedSnapshotExists(String sServiceName, String sSnapshotName)
{
try
{
String[] asSnapshots = listArchivedSnapshots(sServiceName);
return asSnapshots != null && Arrays.asList(asSnapshots).contains(sSnapshotName);
}
catch (Exception e)
{
throw Base.ensureRuntimeException(e, "Error validating archived snapshots");
}
}
/**
* Validate that a snapshot exists across all services.
*
* @param sSnapshotName the snapshot name to check
*
* @throws CohQLException if the condition is not met
*/
public void validateSnapshotExistsForAllServices(String sSnapshotName)
{
StringBuilder sb = new StringBuilder();
try
{
for (Map.Entry<String, String[]> entry : listSnapshots().entrySet())
{
String[] asSnapshots = entry.getValue();
if (!Arrays.asList(asSnapshots).contains(sSnapshotName))
{
sb.append("The snapshot ").append(sSnapshotName)
.append(" does not exist on service ").append(entry.getKey())
.append('\n');
}
}
}
catch (Exception e)
{
throw Base.ensureRuntimeException(e, "Error validating snapshot");
}
if (sb.length() > 0)
{
throw new CohQLException(sb.toString());
}
}
/**
* Validate that an archived snapshot exists across all services to ensure
* success for a retrieve or purge operation.
*
* @param sSnapshotName the archived snapshot name to check
*
* @throws CohQLException if the condition is met
*/
public void validateArchivedSnapshotExistsForAllServices(String sSnapshotName)
{
StringBuilder sb = new StringBuilder();
try
{
for (Map.Entry<String, String[]> entry : listServices().entrySet())
{
String sServiceName = entry.getKey();
String[] asArchivedSnapshots = listArchivedSnapshots(sServiceName);
if (!Arrays.asList(asArchivedSnapshots).contains(sSnapshotName))
{
sb.append("The archived snapshot ").append(sSnapshotName)
.append(" does not exist on service ").append(sServiceName)
.append('\n');
}
}
}
catch (Exception e)
{
throw Base.ensureRuntimeException(e, "Error validating snapshot");
}
if (sb.length() > 0)
{
throw new CohQLException(sb.toString());
}
}
/**
* List all the services configured for active or on-demand mode and information
* about them including: persistence-mode, QuorumStatus and current operation status
* from the PersistenceSnapshotMBean.
*
* @return a {@link Map} of services and related information
*/
public Map<String, String[]> listServices()
{
Map<String, String[]> mapResults = new HashMap<>();
for (Map.Entry<String, String> entry : getPersistenceServices().entrySet())
{
String sServiceName = entry.getKey();
String sPersistenceMode = entry.getValue();
String[] asResults = getServiceInfo(sServiceName);
mapResults.put(sServiceName, new String[] {sPersistenceMode, asResults[0], asResults[1]});
}
return mapResults;
}
/**
* List all the services configured for active or on-demand mode and display the
* persistence environment implementation.
*
* @return a {@link List} of services and related information
*/
public List<String> listServicesEnvironment()
{
List<String> listInfo = new ArrayList<>();
for (String sServiceName : getPersistenceServices().keySet())
{
String sMBean = getStorageEnabledMember(sServiceName);
if (sMBean == null)
{
throw new RuntimeException("Unable to find storage-enabled members for service " + sServiceName);
}
String sEnvironment = (String) getAttribute(sMBean, "PersistenceEnvironment");
listInfo.add(sServiceName + " - " + sEnvironment);
}
return listInfo;
}
/**
* List the snapshots for the specified service.
*
* @param sServiceName the name of the service to list snapshots for
*
* @return the snapshots for the specified service or an empty String[]
* if none exist
*/
public String[] listSnapshots(String sServiceName)
{
try
{
String[] asSnapshots = (String[]) getAttribute(
getPersistenceMBean(sServiceName), "Snapshots");
return asSnapshots == null ? NO_SNAPSHOTS : asSnapshots;
}
catch (Exception e)
{
throw Base.ensureRuntimeException(e);
}
}
/**
* List the snapshots for all services.
*
* @return a {@link java.util.Map} of services and their snapshots.
*/
public Map<String, String[]> listSnapshots()
{
Map<String, String[]> mapResults = new HashMap<>();
for (Map.Entry<String, String> entry : getPersistenceServices().entrySet())
{
String sServiceName = entry.getKey();
mapResults.put(sServiceName, listSnapshots(sServiceName));
}
return mapResults;
}
/**
* Return a list of archived snapshots for a given service.
*
* @param sServiceName the name of the service to query
*
* @return a {@link String}[] of archived snapshots for the given service
*/
public String[] listArchivedSnapshots(String sServiceName)
{
try
{
return (String[]) m_mbsProxy.invoke(
getPersistenceMBean(sServiceName), "listArchivedSnapshots",
new String[] {}, new String[] {});
}
catch (Exception e)
{
throw new RuntimeException("Unable to execute listArchivedSnapshots for service "
+ sServiceName + ": " + e.getMessage());
}
}
/**
* List the archived snapshots for all services.
*
* @return a {@link java.util.Map} of services and their archived snapshots.
*/
public Map<String, String[]> listArchivedSnapshots()
{
Map<String, String[]> mapResults = new HashMap<>();
// go through each of the services returned and retrieve the snapshots.
for (String sServiceName : getPersistenceServices().keySet())
{
try
{
mapResults.put(sServiceName, listArchivedSnapshots(sServiceName));
}
catch (Exception e)
{
if (e instanceof RuntimeException && e.getMessage().contains("MBeanException"))
{
// ignore as we may not have an archiver defined for the service
}
else
{
throw ensureCohQLException(e, "Unable to list archived snapshots");
}
}
}
return mapResults;
}
/**
* Return the archiver configured for the given service.
*
* @param sServiceName the name of the service to query
*
* @return the archiver configured for the given services or 'n/a' if none exists
*/
public String getArchiver(String sServiceName)
{
String sMBean = getStorageEnabledMember(sServiceName);
if (sMBean == null)
{
throw new RuntimeException("Unable to find storage-enabled members for service " + sServiceName);
}
return (String) getAttribute(sMBean, "PersistenceSnapshotArchiver");
}
/**
* Resume a given service.
*
* @param sServiceName the service to resume
*/
public void resumeService(String sServiceName)
{
try
{
m_mbsProxy.invoke(Registry.CLUSTER_TYPE, RESUME_SERVICE, new String[] {sServiceName},
new String[] {"java.lang.String"});
}
catch (Exception e)
{
throw new RuntimeException("Unable to resume service " + e.getMessage());
}
}
/**
* Suspend a given service.
*
* @param sServiceName the service to suspend
*/
public void suspendService(String sServiceName)
{
try
{
m_mbsProxy.invoke(Registry.CLUSTER_TYPE, SUSPEND_SERVICE, new String[] {sServiceName},
new String[] {"java.lang.String"});
}
catch (Exception e)
{
throw new RuntimeException("Unable to resume service " + e.getMessage());
}
}
/**
* Ensures that the specified service is in a ready state to begin snapshot operations.
* Ie. The service should not have operations that are running. This call will
* wait for any processes to complete if fWait is true. <br>
* This method will poll continuously until an "Idle" status has been reached
* or until timeout set by a calling thread has been raised.
*
* @param fWait if true and the service is not Idle then wait, otherwise
* throw an exception
* @param sServiceToCheck the service to check for or null for all services
*
* @throws CohQLException if any services are not in a proper state
*/
public void ensureReady(boolean fWait, String sServiceToCheck)
{
try
{
while (true)
{
String sStatus = getOperationStatus(sServiceToCheck);
if (STATUS_IDLE.equals(sStatus))
{
// operation is Idle
break;
}
else
{
if (fWait)
{
Blocking.sleep(SLEEP_TIME);
}
else
{
throw new CohQLException("The service " + sServiceToCheck
+ " currently has an operation in progress: \n" + sStatus
+ "\nPlease use LIST SERVICES to determine when service is ready.");
}
}
}
}
catch (Exception e)
{
throw ensureCohQLException(e, "Error during ensureReady");
}
}
/**
* Ensures that the services are in a ready state to begin snapshot operations.
* Ie. they should not have operations that are running. If the context is
* silent then we will wait, otherwise will fail fast.
*
* @param ctx context
* @param sService the service to wait to be ready or if null, then all services
*
* @throws CohQLException if any services are not in a proper state
*/
public void ensureReady(ExecutionContext ctx, String sService)
{
ensureReady(ctx.isSilent(), sService);
}
    /**
     * Return a CohQLException with the given cause. If the specified
     * cause is an instance of CohQLException, the given throwable will
     * be returned as is; otherwise, a new CohQLException will be
     * allocated and returned, with a detail message that surfaces up to
     * three levels of nested cause messages on separate lines.
     *
     * @param eCause  an optional cause
     * @param sMsg    an optional detail message
     *
     * @return a CohQLException with the given cause and detail message
     */
    public static CohQLException ensureCohQLException(Throwable eCause, String sMsg)
        {
        StringBuilder sb = new StringBuilder(sMsg);
        Throwable cause = eCause;
        // check for exception raised from Mbean Server or PersistenceException;
        // in those cases the useful detail is buried in the cause chain, so
        // unwrap and append each nested message
        if ((eCause instanceof WrapperException && eCause.getCause() instanceof RuntimeException) ||
            (eCause instanceof PersistenceException))
            {
            Throwable t = eCause.getCause();
            sb.append(" - ").append(eCause.getMessage());
            if (t != null)
                {
                sb.append('\n').append(t.getMessage());
                cause = t.getCause();
                if (cause != null)
                    {
                    sb.append('\n').append(cause.getMessage());
                    // NOTE(review): appends the Throwable itself (its toString),
                    // and prints "null" when there is no deeper cause - confirm
                    // this output is intentional
                    sb.append('\n').append(cause.getCause());
                    }
                }
            }
        // a CohQLException is returned unchanged; sb is only used otherwise
        return eCause instanceof CohQLException ?
               (CohQLException) eCause : new CohQLException(sb.toString(), cause);
        }
    /**
     * Output a trace message to the defined {@link java.io.PrintWriter}.
     * The message is prefixed with the current (safe) time and the writer is
     * flushed immediately so progress is visible during long waits.  A no-op
     * when tracing is disabled (no writer configured).
     *
     * @param sMessage  the message to output
     */
    private void traceMessage(String sMessage)
        {
        if (isTraceEnabled())
            {
            m_out.println(new Date(Base.getSafeTimeMillis()) + " : " + sMessage);
            m_out.flush();
            }
        }
    /**
     * Return the "OperationStatus" attribute of the PersistenceManagerMBean
     * for the given service, e.g. "Idle" when no operation is in progress.
     *
     * @param sServiceName  the name of the service to query
     *
     * @return the current operation status of the service
     */
    public String getOperationStatus(String sServiceName)
        {
        return (String) getAttribute(getPersistenceMBean(sServiceName), "OperationStatus");
        }
/**
* Validate that a snapshot does not exist across all services.
*
* @param sSnapshotName the snapshot name to check
*
*/
private void validateNoSnapshotExistsForAllServices(String sSnapshotName)
{
StringBuilder sb = new StringBuilder();
try
{
for (Map.Entry<String, String[]> entry : listSnapshots().entrySet())
{
String[] asSnapshots = entry.getValue();
if (Arrays.asList(asSnapshots).contains(sSnapshotName))
{
sb.append("The snapshot ").append(sSnapshotName)
.append(" already exists on service ").append(entry.getKey())
.append('\n');
}
}
}
catch (Exception e)
{
throw Base.ensureRuntimeException(e, "Error validating snapshot");
}
if (sb.length() > 0)
{
throw new CohQLException(sb.toString());
}
}
    /**
     * Return a {@link Map} of services that are configured for persistence as
     * either active or on-demand.
     * <p>
     * Implementation: queries the DistributionCoordinator MBeans (one exists
     * per persistence-capable partitioned service) and strips the surrounding
     * ObjectName key/value parts with regexes, leaving just the bare service
     * name; then reads the "PersistenceMode" attribute from any Service MBean
     * instance of that service.
     *
     * @return the {@link Map} of services with service name as key and
     *         persistence mode as value
     */
    private Map<String, String> getPersistenceServices()
        {
        Map<String, String> mapServices = new HashMap<>();
        String sQuery = COHERENCE + Registry.PARTITION_ASSIGNMENT_TYPE + ",responsibility=DistributionCoordinator,*";
        // strip everything except the service name from each ObjectName:
        // domain+type prefix, the responsibility key, any domainPartition key
        // (NOTE(review): assumes domainPartition is never the last key - it is
        // matched with a trailing comma; confirm against actual MBean names),
        // and finally the ",service=" key itself
        Set<String> setServiceNames = m_mbsProxy.queryNames(sQuery, null)
                .stream()
                .map(s->s.replaceAll("^.*type=PartitionAssignment", "")
                         .replaceAll(",responsibility=DistributionCoordinator", "")
                         .replaceAll("domainPartition=.*,", "")
                         .replaceAll(",service=", ""))
                .collect(Collectors.toSet());
        setServiceNames.forEach(s ->
            {
            // any one Service MBean instance is sufficient to read the mode;
            // services with no registered Service MBean are silently skipped
            Optional<String> serviceMBean =
                    m_mbsProxy.queryNames(COHERENCE + Registry.SERVICE_TYPE + ",name=" + s + ",*", null)
                              .stream().findAny();
            if (serviceMBean.isPresent())
                {
                String sServiceMbean = serviceMBean.get();
                Map<String, Object> mapServiceAttr = m_mbsProxy.getAttributes(sServiceMbean, Filters.always());
                mapServices.put(s, (String) mapServiceAttr.get("PersistenceMode"));
                }
            });
        return mapServices;
        }
/**
* Return the ObjectName of a storage-enable member for the given service.
*
* @param sServiceName the service name to retrieve ObjectName for
*
* @return the ObjectName of a storage-enable member for the service an null if none
*/
private String getStorageEnabledMember(String sServiceName)
{
Set<String> setServices = m_mbsProxy.queryNames(COHERENCE + Registry.SERVICE_TYPE + ",name=" + sServiceName + ",*", null);
// find the first storage-enabled member
for (String sMbean : setServices)
{
if ((Integer) getAttribute(sMbean, "OwnedPartitionsPrimary") > 0)
{
return sMbean;
}
}
return null;
}
/**
* Ensure that a object name is registered as there can be a race condition
* as some MBeans are registered async.
*
* @param sObjectName the object name to ensure
*/
private void ensureMBeanRegistration(String sObjectName)
{
boolean fLogged = false;
int nCounter = 3 * 1000; // 30 seconds , 3,000 * 10ms wait
// wait for registration of sObjectName as the registration is done
// async and may not be complete before our first call after ensureCluster().
while (!m_mbsProxy.isMBeanRegistered(sObjectName))
{
if (isTraceEnabled() && !fLogged)
{
traceMessage("Waiting for " + sObjectName + " to be registered");
fLogged = true;
}
try
{
Blocking.sleep(10L);
}
catch (InterruptedException e)
{
}
if (--nCounter <= 0)
{
// fail-safe in case cluster never registered
throw new RuntimeException("MBean " + sObjectName + " was not registered after 30 seconds." +
" You must be running an MBean Server within the cluster to use 'Persistence' commands.");
}
}
if (isTraceEnabled() && fLogged)
{
traceMessage(sObjectName + " is now registered");
}
}
    /**
     * Return service information for the list services command. The values returned are:
     * <ol>
     *     <li>[0] - QuorumStatus</li>
     *     <li>[1] - OperationStatus</li>
     * </ol>
     *
     * @param sServiceName  the name of the service to query
     *
     * @return a {@link String} array of information
     *
     * @throws RuntimeException if no storage-enabled member exists for the service
     */
    private String[] getServiceInfo(String sServiceName)
        {
        String sMBean = getStorageEnabledMember(sServiceName);
        if (sMBean == null)
            {
            throw new RuntimeException("Unable to find storage-enabled members for service " + sServiceName);
            }
        String sQuorumStatus = (String) getAttribute((sMBean), "QuorumStatus");
        String sMBeanName = getMBeanName(sServiceName);
        // NOTE(review): when the member MBean carries a domainPartition key
        // (multi-tenant / WLS style names), the persistence MBean name is
        // qualified with the same domainPartition - confirm the regex below
        // against the actual ObjectName format in such deployments
        if (sMBean.contains("domainPartition"))
            {
            // isolate "domainPartition=<value>" from the member's ObjectName
            String sDomainPartition = sMBean.replaceAll("^.*,domainPartition=", "domainPartition=")
                                            .replaceAll(",.*$", "");
            sMBeanName = getMBeanName(sServiceName) + "," + sDomainPartition;
            }
        String sOperationStatus = (String) getAttribute(sMBeanName, "OperationStatus");
        return new String[] {sQuorumStatus, sOperationStatus};
        }
/**
* Return the PersistenceManager MBean name.
*
* @param sServiceName the name of the service to return the name for
*
* @return the MBean name
*/
public String getPersistenceMBean(String sServiceName)
{
return ensureGlobalName(getMBeanName(sServiceName));
}
/**
* Return the Service MBean name.
*
* @param sServiceName the name of the service to return the name for
* @param member the member of the service to return the name for
*
* @return the MBean name
*/
public String getServiceMBean(String sServiceName, Member member)
{
return m_registry.ensureGlobalName(
Registry.SERVICE_TYPE + ",name=" + sServiceName, member);
}
/**
* Return true if the service is federated or distributed
*
* @param sType the service type
*
* @return true if the service is federated or distributed
*/
private static boolean isValidServiceType(String sType)
{
return "DistributedCache".equals(sType) || "FederatedCache".equals(sType);
}
/**
* Return a global name for the given MBean Name.
*
* @param sName the MBean to get global name for.
*
* @return the global name.
*/
private String ensureGlobalName(String sName)
{
return m_registry.ensureGlobalName(sName);
}
/**
* Return an attribute name from an MBean.
*
* @param sObjectName object name to query
* @param sAttribute attribute to retrieve from object name
*
* @return the value of the attribute
*/
private Object getAttribute(String sObjectName, String sAttribute)
{
return m_mbsProxy.getAttribute(sObjectName, sAttribute);
}
// ----- helpers --------------------------------------------------------
/**
* Return a term for a given scanner representing the specified name.
* If the end of statement is reached then an CohQLException is raised.
*
* @param s OPScanner to use
* @param sName the name to assign the new term
* @param sDescription a description for any exception
* @param sCommand the command name
*
* @return a new term
*
* @throws CohQLException if end of statement is reached
*/
public static Term getNextTerm(OPScanner s, String sName, String sDescription, String sCommand)
{
if (s.isEndOfStatement())
{
throw new CohQLException(sDescription + " required for " + sCommand);
}
return Terms.newTerm(sName, AtomicTerm.createString(s.getCurrentAsStringWithAdvance()));
}
/**
* Return the snapshot directory for a given service and snapshot.
*
* @param ccf ConfigurableCacheFactory to use to get dependencies
* @param sSnapshot the snapshot name to use
* @param sServiceName the service name to use
*
* @return a File representing the snapshot directory
*/
public static File getSnapshotDirectory(ConfigurableCacheFactory ccf, String sSnapshot, String sServiceName)
{
if (ccf instanceof ExtensibleConfigurableCacheFactory)
{
PersistenceEnvironmentInfo info =
CachePersistenceHelper.getEnvironmentInfo((ExtensibleConfigurableCacheFactory) ccf, sServiceName);
if (info == null)
{
throw new CohQLException("Unable to get persistence environment info for service " +
sServiceName + " and snapshot " + sSnapshot);
}
return new File(info.getPersistenceSnapshotDirectory(), FileHelper.toFilename(sSnapshot));
}
throw new UnsupportedOperationException("ConfigurableCacheFactory is not an instance of ExtensibleConfigurableCacheFactory");
}
    // ----- accessors ------------------------------------------------------
    /**
     * Set the {@link java.io.PrintWriter} for any messages to go to.
     * Supplying a non-null writer enables tracing; null disables it.
     *
     * @param out  the {@link java.io.PrintWriter} to use
     */
    public void setPrintWriter(PrintWriter out)
        {
        m_out = out;
        }
    /**
     * Return if trace is enabled, i.e. whether a PrintWriter has been set.
     *
     * @return if trace is enabled
     */
    public boolean isTraceEnabled()
        {
        return m_out != null;
        }
// ----- constants ------------------------------------------------------
/**
* Coherence Prefix.
*/
private static final String COHERENCE = "Coherence:";
/**
* JMX operation to create a snapshot.
*/
public static final String CREATE_SNAPSHOT = "createSnapshot";
/**
* JMX operation to recover a snapshot.
*/
public static final String RECOVER_SNAPSHOT = "recoverSnapshot";
/**
* JMX operation to remove a snapshot.
*/
public static final String REMOVE_SNAPSHOT = "removeSnapshot";
/**
* JMX operation to archive a snapshot
*/
public static final String ARCHIVE_SNAPSHOT = "archiveSnapshot";
/**
* JMX operation to retrieve an archived snapshot
*/
public static final String RETRIEVE_ARCHIVED_SNAPSHOT = "retrieveArchivedSnapshot";
/**
* JMX operation to remove an archived snapshot
*/
public static final String REMOVE_ARCHIVED_SNAPSHOT = "removeArchivedSnapshot";
/**
* JMX operation to suspend a service.
*/
public static final String SUSPEND_SERVICE = "suspendService";
/**
* JMX operation to resume a service.
*/
public static final String RESUME_SERVICE = "resumeService";
/**
* JMX operation to force recovery.
*/
public static final String FORCE_RECOVERY = "forceRecovery";
/**
* Idle status.
*/
private static final String STATUS_IDLE = "Idle";
/**
* Sleep time between checking operation completion.
*/
private static final long SLEEP_TIME = 2000L;
/**
* Cluster Tools registry key.
*/
private static final String HELPER = "persistence_tools_helper";
/**
* Signifies no snapshots were found.
*/
private static final String[] NO_SNAPSHOTS = new String[0];
// ----- data members ---------------------------------------------------
/**
* A PrintWriter to output any informational messages.
*/
private PrintWriter m_out = null;
/**
* MBean server proxy for JMX operations and attribute retrieval for online mode.
*/
private MBeanServerProxy m_mbsProxy;
/**
* Management Registry if we are connected to a cluster.
*/
private Registry m_registry;
}
|
googleapis/google-cloud-java | 36,118 | java-kms/proto-google-cloud-kms-v1/src/main/java/com/google/cloud/kms/v1/CreateKeyRingRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/kms/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.kms.v1;
/**
*
*
* <pre>
* Request message for
* [KeyManagementService.CreateKeyRing][google.cloud.kms.v1.KeyManagementService.CreateKeyRing].
* </pre>
*
* Protobuf type {@code google.cloud.kms.v1.CreateKeyRingRequest}
*/
public final class CreateKeyRingRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.kms.v1.CreateKeyRingRequest)
    CreateKeyRingRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CreateKeyRingRequest.newBuilder() to construct.
  private CreateKeyRingRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private CreateKeyRingRequest() {
    parent_ = "";
    keyRingId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateKeyRingRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.kms.v1.KmsProto
        .internal_static_google_cloud_kms_v1_CreateKeyRingRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.kms.v1.KmsProto
        .internal_static_google_cloud_kms_v1_CreateKeyRingRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.kms.v1.CreateKeyRingRequest.class,
            com.google.cloud.kms.v1.CreateKeyRingRequest.Builder.class);
  }
  // Presence bits for optional message fields; bit 0x1 records whether key_ring is set.
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The resource name of the location associated with the
   * [KeyRings][google.cloud.kms.v1.KeyRing], in the format
   * `projects/*/locations/*`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the cached ByteString and memoize the resulting String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The resource name of the location associated with the
   * [KeyRings][google.cloud.kms.v1.KeyRing], in the format
   * `projects/*/locations/*`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int KEY_RING_ID_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private volatile java.lang.Object keyRingId_ = "";
  /**
   *
   *
   * <pre>
   * Required. It must be unique within a location and match the regular
   * expression `[a-zA-Z0-9_-]{1,63}`
   * </pre>
   *
   * <code>string key_ring_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The keyRingId.
   */
  @java.lang.Override
  public java.lang.String getKeyRingId() {
    java.lang.Object ref = keyRingId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      keyRingId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. It must be unique within a location and match the regular
   * expression `[a-zA-Z0-9_-]{1,63}`
   * </pre>
   *
   * <code>string key_ring_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for keyRingId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getKeyRingIdBytes() {
    java.lang.Object ref = keyRingId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      keyRingId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int KEY_RING_FIELD_NUMBER = 3;
  private com.google.cloud.kms.v1.KeyRing keyRing_;
  /**
   *
   *
   * <pre>
   * Required. A [KeyRing][google.cloud.kms.v1.KeyRing] with initial field
   * values.
   * </pre>
   *
   * <code>.google.cloud.kms.v1.KeyRing key_ring = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the keyRing field is set.
   */
  @java.lang.Override
  public boolean hasKeyRing() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. A [KeyRing][google.cloud.kms.v1.KeyRing] with initial field
   * values.
   * </pre>
   *
   * <code>.google.cloud.kms.v1.KeyRing key_ring = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The keyRing.
   */
  @java.lang.Override
  public com.google.cloud.kms.v1.KeyRing getKeyRing() {
    return keyRing_ == null ? com.google.cloud.kms.v1.KeyRing.getDefaultInstance() : keyRing_;
  }
  /**
   *
   *
   * <pre>
   * Required. A [KeyRing][google.cloud.kms.v1.KeyRing] with initial field
   * values.
   * </pre>
   *
   * <code>.google.cloud.kms.v1.KeyRing key_ring = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.kms.v1.KeyRingOrBuilder getKeyRingOrBuilder() {
    return keyRing_ == null ? com.google.cloud.kms.v1.KeyRing.getDefaultInstance() : keyRing_;
  }
  // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(keyRingId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, keyRingId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getKeyRing());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(keyRingId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, keyRingId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getKeyRing());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.kms.v1.CreateKeyRingRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.kms.v1.CreateKeyRingRequest other =
        (com.google.cloud.kms.v1.CreateKeyRingRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getKeyRingId().equals(other.getKeyRingId())) return false;
    if (hasKeyRing() != other.hasKeyRing()) return false;
    if (hasKeyRing()) {
      if (!getKeyRing().equals(other.getKeyRing())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + KEY_RING_ID_FIELD_NUMBER;
    hash = (53 * hash) + getKeyRingId().hashCode();
    if (hasKeyRing()) {
      hash = (37 * hash) + KEY_RING_FIELD_NUMBER;
      hash = (53 * hash) + getKeyRing().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.kms.v1.CreateKeyRingRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.kms.v1.CreateKeyRingRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.kms.v1.CreateKeyRingRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.kms.v1.CreateKeyRingRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.kms.v1.CreateKeyRingRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.kms.v1.CreateKeyRingRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.kms.v1.CreateKeyRingRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.kms.v1.CreateKeyRingRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.kms.v1.CreateKeyRingRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.kms.v1.CreateKeyRingRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.kms.v1.CreateKeyRingRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.kms.v1.CreateKeyRingRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.kms.v1.CreateKeyRingRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for
   * [KeyManagementService.CreateKeyRing][google.cloud.kms.v1.KeyManagementService.CreateKeyRing].
   * </pre>
   *
   * Protobuf type {@code google.cloud.kms.v1.CreateKeyRingRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.kms.v1.CreateKeyRingRequest)
      com.google.cloud.kms.v1.CreateKeyRingRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.kms.v1.KmsProto
          .internal_static_google_cloud_kms_v1_CreateKeyRingRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.kms.v1.KmsProto
          .internal_static_google_cloud_kms_v1_CreateKeyRingRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.kms.v1.CreateKeyRingRequest.class,
              com.google.cloud.kms.v1.CreateKeyRingRequest.Builder.class);
    }
    // Construct using com.google.cloud.kms.v1.CreateKeyRingRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getKeyRingFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      keyRingId_ = "";
      keyRing_ = null;
      if (keyRingBuilder_ != null) {
        keyRingBuilder_.dispose();
        keyRingBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.kms.v1.KmsProto
          .internal_static_google_cloud_kms_v1_CreateKeyRingRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.kms.v1.CreateKeyRingRequest getDefaultInstanceForType() {
      return com.google.cloud.kms.v1.CreateKeyRingRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.kms.v1.CreateKeyRingRequest build() {
      com.google.cloud.kms.v1.CreateKeyRingRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.kms.v1.CreateKeyRingRequest buildPartial() {
      com.google.cloud.kms.v1.CreateKeyRingRequest result =
          new com.google.cloud.kms.v1.CreateKeyRingRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartial0(com.google.cloud.kms.v1.CreateKeyRingRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.keyRingId_ = keyRingId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.keyRing_ = keyRingBuilder_ == null ? keyRing_ : keyRingBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.kms.v1.CreateKeyRingRequest) {
        return mergeFrom((com.google.cloud.kms.v1.CreateKeyRingRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.kms.v1.CreateKeyRingRequest other) {
      if (other == com.google.cloud.kms.v1.CreateKeyRingRequest.getDefaultInstance()) return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getKeyRingId().isEmpty()) {
        keyRingId_ = other.keyRingId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasKeyRing()) {
        mergeKeyRing(other.getKeyRing());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                keyRingId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getKeyRingFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder presence bits: 0x1 = parent, 0x2 = key_ring_id, 0x4 = key_ring.
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The resource name of the location associated with the
     * [KeyRings][google.cloud.kms.v1.KeyRing], in the format
     * `projects/*/locations/*`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The resource name of the location associated with the
     * [KeyRings][google.cloud.kms.v1.KeyRing], in the format
     * `projects/*/locations/*`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The resource name of the location associated with the
     * [KeyRings][google.cloud.kms.v1.KeyRing], in the format
     * `projects/*/locations/*`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The resource name of the location associated with the
     * [KeyRings][google.cloud.kms.v1.KeyRing], in the format
     * `projects/*/locations/*`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The resource name of the location associated with the
     * [KeyRings][google.cloud.kms.v1.KeyRing], in the format
     * `projects/*/locations/*`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object keyRingId_ = "";
    /**
     *
     *
     * <pre>
     * Required. It must be unique within a location and match the regular
     * expression `[a-zA-Z0-9_-]{1,63}`
     * </pre>
     *
     * <code>string key_ring_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The keyRingId.
     */
    public java.lang.String getKeyRingId() {
      java.lang.Object ref = keyRingId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        keyRingId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. It must be unique within a location and match the regular
     * expression `[a-zA-Z0-9_-]{1,63}`
     * </pre>
     *
     * <code>string key_ring_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for keyRingId.
     */
    public com.google.protobuf.ByteString getKeyRingIdBytes() {
      java.lang.Object ref = keyRingId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        keyRingId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. It must be unique within a location and match the regular
     * expression `[a-zA-Z0-9_-]{1,63}`
     * </pre>
     *
     * <code>string key_ring_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The keyRingId to set.
     * @return This builder for chaining.
     */
    public Builder setKeyRingId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      keyRingId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. It must be unique within a location and match the regular
     * expression `[a-zA-Z0-9_-]{1,63}`
     * </pre>
     *
     * <code>string key_ring_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearKeyRingId() {
      keyRingId_ = getDefaultInstance().getKeyRingId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. It must be unique within a location and match the regular
     * expression `[a-zA-Z0-9_-]{1,63}`
     * </pre>
     *
     * <code>string key_ring_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for keyRingId to set.
     * @return This builder for chaining.
     */
    public Builder setKeyRingIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      keyRingId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private com.google.cloud.kms.v1.KeyRing keyRing_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.kms.v1.KeyRing,
            com.google.cloud.kms.v1.KeyRing.Builder,
            com.google.cloud.kms.v1.KeyRingOrBuilder>
        keyRingBuilder_;
    /**
     *
     *
     * <pre>
     * Required. A [KeyRing][google.cloud.kms.v1.KeyRing] with initial field
     * values.
     * </pre>
     *
     * <code>.google.cloud.kms.v1.KeyRing key_ring = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the keyRing field is set.
     */
    public boolean hasKeyRing() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. A [KeyRing][google.cloud.kms.v1.KeyRing] with initial field
     * values.
     * </pre>
     *
     * <code>.google.cloud.kms.v1.KeyRing key_ring = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The keyRing.
     */
    public com.google.cloud.kms.v1.KeyRing getKeyRing() {
      if (keyRingBuilder_ == null) {
        return keyRing_ == null ? com.google.cloud.kms.v1.KeyRing.getDefaultInstance() : keyRing_;
      } else {
        return keyRingBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. A [KeyRing][google.cloud.kms.v1.KeyRing] with initial field
     * values.
     * </pre>
     *
     * <code>.google.cloud.kms.v1.KeyRing key_ring = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setKeyRing(com.google.cloud.kms.v1.KeyRing value) {
      if (keyRingBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        keyRing_ = value;
      } else {
        keyRingBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A [KeyRing][google.cloud.kms.v1.KeyRing] with initial field
     * values.
     * </pre>
     *
     * <code>.google.cloud.kms.v1.KeyRing key_ring = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setKeyRing(com.google.cloud.kms.v1.KeyRing.Builder builderForValue) {
      if (keyRingBuilder_ == null) {
        keyRing_ = builderForValue.build();
      } else {
        keyRingBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A [KeyRing][google.cloud.kms.v1.KeyRing] with initial field
     * values.
     * </pre>
     *
     * <code>.google.cloud.kms.v1.KeyRing key_ring = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeKeyRing(com.google.cloud.kms.v1.KeyRing value) {
      if (keyRingBuilder_ == null) {
        if (((bitField0_ & 0x00000004) != 0)
            && keyRing_ != null
            && keyRing_ != com.google.cloud.kms.v1.KeyRing.getDefaultInstance()) {
          getKeyRingBuilder().mergeFrom(value);
        } else {
          keyRing_ = value;
        }
      } else {
        keyRingBuilder_.mergeFrom(value);
      }
      if (keyRing_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A [KeyRing][google.cloud.kms.v1.KeyRing] with initial field
     * values.
     * </pre>
     *
     * <code>.google.cloud.kms.v1.KeyRing key_ring = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearKeyRing() {
      bitField0_ = (bitField0_ & ~0x00000004);
      keyRing_ = null;
      if (keyRingBuilder_ != null) {
        keyRingBuilder_.dispose();
        keyRingBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A [KeyRing][google.cloud.kms.v1.KeyRing] with initial field
     * values.
     * </pre>
     *
     * <code>.google.cloud.kms.v1.KeyRing key_ring = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.kms.v1.KeyRing.Builder getKeyRingBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getKeyRingFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. A [KeyRing][google.cloud.kms.v1.KeyRing] with initial field
     * values.
     * </pre>
     *
     * <code>.google.cloud.kms.v1.KeyRing key_ring = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.kms.v1.KeyRingOrBuilder getKeyRingOrBuilder() {
      if (keyRingBuilder_ != null) {
        return keyRingBuilder_.getMessageOrBuilder();
      } else {
        return keyRing_ == null ? com.google.cloud.kms.v1.KeyRing.getDefaultInstance() : keyRing_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. A [KeyRing][google.cloud.kms.v1.KeyRing] with initial field
     * values.
     * </pre>
     *
     * <code>.google.cloud.kms.v1.KeyRing key_ring = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.kms.v1.KeyRing,
            com.google.cloud.kms.v1.KeyRing.Builder,
            com.google.cloud.kms.v1.KeyRingOrBuilder>
        getKeyRingFieldBuilder() {
      if (keyRingBuilder_ == null) {
        keyRingBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.kms.v1.KeyRing,
                com.google.cloud.kms.v1.KeyRing.Builder,
                com.google.cloud.kms.v1.KeyRingOrBuilder>(
                getKeyRing(), getParentForChildren(), isClean());
        keyRing_ = null;
      }
      return keyRingBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.kms.v1.CreateKeyRingRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.kms.v1.CreateKeyRingRequest)
  private static final com.google.cloud.kms.v1.CreateKeyRingRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.kms.v1.CreateKeyRingRequest();
  }
  public static com.google.cloud.kms.v1.CreateKeyRingRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared stateless parser used by all parseFrom/parseDelimitedFrom overloads.
  private static final com.google.protobuf.Parser<CreateKeyRingRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateKeyRingRequest>() {
        @java.lang.Override
        public CreateKeyRingRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CreateKeyRingRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateKeyRingRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.kms.v1.CreateKeyRingRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,198 | java-netapp/proto-google-cloud-netapp-v1/src/main/java/com/google/cloud/netapp/v1/UpdateQuotaRuleRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/netapp/v1/quota_rule.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.netapp.v1;
/**
*
*
* <pre>
* UpdateQuotaRuleRequest for updating a quota rule.
* </pre>
*
* Protobuf type {@code google.cloud.netapp.v1.UpdateQuotaRuleRequest}
*/
public final class UpdateQuotaRuleRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.netapp.v1.UpdateQuotaRuleRequest)
UpdateQuotaRuleRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateQuotaRuleRequest.newBuilder() to construct.
  // Copies the builder's field state into the immutable message via the superclass.
  private UpdateQuotaRuleRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
private UpdateQuotaRuleRequest() {}
  // Invoked reflectively by the protobuf runtime to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateQuotaRuleRequest();
  }
  // Returns the message descriptor generated from google/cloud/netapp/v1/quota_rule.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.netapp.v1.QuotaRuleProto
        .internal_static_google_cloud_netapp_v1_UpdateQuotaRuleRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds descriptor fields to the generated message/builder accessor methods.
    return com.google.cloud.netapp.v1.QuotaRuleProto
        .internal_static_google_cloud_netapp_v1_UpdateQuotaRuleRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.netapp.v1.UpdateQuotaRuleRequest.class,
            com.google.cloud.netapp.v1.UpdateQuotaRuleRequest.Builder.class);
  }
  // Presence bits for optional message fields; bit 0x1 records whether update_mask is set.
  private int bitField0_;
  public static final int UPDATE_MASK_FIELD_NUMBER = 1;
  private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* Quota Rule resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* Quota Rule resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* Quota Rule resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int QUOTA_RULE_FIELD_NUMBER = 2;
private com.google.cloud.netapp.v1.QuotaRule quotaRule_;
/**
*
*
* <pre>
* Required. The quota rule being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the quotaRule field is set.
*/
@java.lang.Override
public boolean hasQuotaRule() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The quota rule being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The quotaRule.
*/
@java.lang.Override
public com.google.cloud.netapp.v1.QuotaRule getQuotaRule() {
return quotaRule_ == null
? com.google.cloud.netapp.v1.QuotaRule.getDefaultInstance()
: quotaRule_;
}
/**
*
*
* <pre>
* Required. The quota rule being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.netapp.v1.QuotaRuleOrBuilder getQuotaRuleOrBuilder() {
return quotaRule_ == null
? com.google.cloud.netapp.v1.QuotaRule.getDefaultInstance()
: quotaRule_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getQuotaRule());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getQuotaRule());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.netapp.v1.UpdateQuotaRuleRequest)) {
return super.equals(obj);
}
com.google.cloud.netapp.v1.UpdateQuotaRuleRequest other =
(com.google.cloud.netapp.v1.UpdateQuotaRuleRequest) obj;
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (hasQuotaRule() != other.hasQuotaRule()) return false;
if (hasQuotaRule()) {
if (!getQuotaRule().equals(other.getQuotaRule())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
if (hasQuotaRule()) {
hash = (37 * hash) + QUOTA_RULE_FIELD_NUMBER;
hash = (53 * hash) + getQuotaRule().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.netapp.v1.UpdateQuotaRuleRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.UpdateQuotaRuleRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateQuotaRuleRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.UpdateQuotaRuleRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateQuotaRuleRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.UpdateQuotaRuleRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateQuotaRuleRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.UpdateQuotaRuleRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateQuotaRuleRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.UpdateQuotaRuleRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateQuotaRuleRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.UpdateQuotaRuleRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.netapp.v1.UpdateQuotaRuleRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* UpdateQuotaRuleRequest for updating a quota rule.
* </pre>
*
* Protobuf type {@code google.cloud.netapp.v1.UpdateQuotaRuleRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.netapp.v1.UpdateQuotaRuleRequest)
com.google.cloud.netapp.v1.UpdateQuotaRuleRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.netapp.v1.QuotaRuleProto
.internal_static_google_cloud_netapp_v1_UpdateQuotaRuleRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.netapp.v1.QuotaRuleProto
.internal_static_google_cloud_netapp_v1_UpdateQuotaRuleRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.netapp.v1.UpdateQuotaRuleRequest.class,
com.google.cloud.netapp.v1.UpdateQuotaRuleRequest.Builder.class);
}
// Construct using com.google.cloud.netapp.v1.UpdateQuotaRuleRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateMaskFieldBuilder();
getQuotaRuleFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
quotaRule_ = null;
if (quotaRuleBuilder_ != null) {
quotaRuleBuilder_.dispose();
quotaRuleBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.netapp.v1.QuotaRuleProto
.internal_static_google_cloud_netapp_v1_UpdateQuotaRuleRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.netapp.v1.UpdateQuotaRuleRequest getDefaultInstanceForType() {
return com.google.cloud.netapp.v1.UpdateQuotaRuleRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.netapp.v1.UpdateQuotaRuleRequest build() {
com.google.cloud.netapp.v1.UpdateQuotaRuleRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.netapp.v1.UpdateQuotaRuleRequest buildPartial() {
com.google.cloud.netapp.v1.UpdateQuotaRuleRequest result =
new com.google.cloud.netapp.v1.UpdateQuotaRuleRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.netapp.v1.UpdateQuotaRuleRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.quotaRule_ = quotaRuleBuilder_ == null ? quotaRule_ : quotaRuleBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.netapp.v1.UpdateQuotaRuleRequest) {
return mergeFrom((com.google.cloud.netapp.v1.UpdateQuotaRuleRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.netapp.v1.UpdateQuotaRuleRequest other) {
if (other == com.google.cloud.netapp.v1.UpdateQuotaRuleRequest.getDefaultInstance())
return this;
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.hasQuotaRule()) {
mergeQuotaRule(other.getQuotaRule());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getQuotaRuleFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* Quota Rule resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* Quota Rule resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* Quota Rule resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* Quota Rule resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* Quota Rule resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* Quota Rule resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000001);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* Quota Rule resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* Quota Rule resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* Quota Rule resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private com.google.cloud.netapp.v1.QuotaRule quotaRule_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.netapp.v1.QuotaRule,
com.google.cloud.netapp.v1.QuotaRule.Builder,
com.google.cloud.netapp.v1.QuotaRuleOrBuilder>
quotaRuleBuilder_;
/**
*
*
* <pre>
* Required. The quota rule being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the quotaRule field is set.
*/
public boolean hasQuotaRule() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The quota rule being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The quotaRule.
*/
public com.google.cloud.netapp.v1.QuotaRule getQuotaRule() {
if (quotaRuleBuilder_ == null) {
return quotaRule_ == null
? com.google.cloud.netapp.v1.QuotaRule.getDefaultInstance()
: quotaRule_;
} else {
return quotaRuleBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The quota rule being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setQuotaRule(com.google.cloud.netapp.v1.QuotaRule value) {
if (quotaRuleBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
quotaRule_ = value;
} else {
quotaRuleBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The quota rule being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setQuotaRule(com.google.cloud.netapp.v1.QuotaRule.Builder builderForValue) {
if (quotaRuleBuilder_ == null) {
quotaRule_ = builderForValue.build();
} else {
quotaRuleBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The quota rule being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeQuotaRule(com.google.cloud.netapp.v1.QuotaRule value) {
if (quotaRuleBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& quotaRule_ != null
&& quotaRule_ != com.google.cloud.netapp.v1.QuotaRule.getDefaultInstance()) {
getQuotaRuleBuilder().mergeFrom(value);
} else {
quotaRule_ = value;
}
} else {
quotaRuleBuilder_.mergeFrom(value);
}
if (quotaRule_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The quota rule being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearQuotaRule() {
bitField0_ = (bitField0_ & ~0x00000002);
quotaRule_ = null;
if (quotaRuleBuilder_ != null) {
quotaRuleBuilder_.dispose();
quotaRuleBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The quota rule being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.netapp.v1.QuotaRule.Builder getQuotaRuleBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getQuotaRuleFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The quota rule being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.netapp.v1.QuotaRuleOrBuilder getQuotaRuleOrBuilder() {
if (quotaRuleBuilder_ != null) {
return quotaRuleBuilder_.getMessageOrBuilder();
} else {
return quotaRule_ == null
? com.google.cloud.netapp.v1.QuotaRule.getDefaultInstance()
: quotaRule_;
}
}
/**
*
*
* <pre>
* Required. The quota rule being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.netapp.v1.QuotaRule,
com.google.cloud.netapp.v1.QuotaRule.Builder,
com.google.cloud.netapp.v1.QuotaRuleOrBuilder>
getQuotaRuleFieldBuilder() {
if (quotaRuleBuilder_ == null) {
quotaRuleBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.netapp.v1.QuotaRule,
com.google.cloud.netapp.v1.QuotaRule.Builder,
com.google.cloud.netapp.v1.QuotaRuleOrBuilder>(
getQuotaRule(), getParentForChildren(), isClean());
quotaRule_ = null;
}
return quotaRuleBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.netapp.v1.UpdateQuotaRuleRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.netapp.v1.UpdateQuotaRuleRequest)
private static final com.google.cloud.netapp.v1.UpdateQuotaRuleRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.netapp.v1.UpdateQuotaRuleRequest();
}
public static com.google.cloud.netapp.v1.UpdateQuotaRuleRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateQuotaRuleRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateQuotaRuleRequest>() {
@java.lang.Override
public UpdateQuotaRuleRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateQuotaRuleRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateQuotaRuleRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.netapp.v1.UpdateQuotaRuleRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/hadoop-common | 36,079 | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.net.URI;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.LayoutVersion;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.apache.hadoop.hdfs.server.common.InconsistentFSStateException;
import org.apache.hadoop.hdfs.server.common.Storage;
import org.apache.hadoop.hdfs.server.common.StorageErrorReporter;
import org.apache.hadoop.hdfs.server.common.Util;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.util.PersistentLongFile;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.DNS;
import org.apache.hadoop.util.Time;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
/**
* NNStorage is responsible for management of the StorageDirectories used by
* the NameNode.
*/
@InterfaceAudience.Private
public class NNStorage extends Storage implements Closeable,
StorageErrorReporter {
  // Old storage property under which the image MD5 digest was stored;
  // presumably kept only for reading older layouts — TODO confirm callers.
  static final String DEPRECATED_MESSAGE_DIGEST_PROPERTY = "imageMD5Digest";
  // URI scheme identifying storage directories on the local file system.
  static final String LOCAL_URI_SCHEME = "file";
  //
  // The filenames used for storing the images
  //
public enum NameNodeFile {
IMAGE ("fsimage"),
TIME ("fstime"), // from "old" pre-HDFS-1073 format
SEEN_TXID ("seen_txid"),
EDITS ("edits"),
IMAGE_NEW ("fsimage.ckpt"),
IMAGE_ROLLBACK("fsimage_rollback"),
EDITS_NEW ("edits.new"), // from "old" pre-HDFS-1073 format
EDITS_INPROGRESS ("edits_inprogress"),
EDITS_TMP ("edits_tmp"),
IMAGE_LEGACY_OIV ("fsimage_legacy_oiv"); // For pre-PB format
private String fileName = null;
private NameNodeFile(String name) { this.fileName = name; }
@VisibleForTesting
public String getName() { return fileName; }
}
/**
* Implementation of StorageDirType specific to namenode storage
* A Storage directory could be of type IMAGE which stores only fsimage,
* or of type EDITS which stores edits or of type IMAGE_AND_EDITS which
* stores both fsimage and edits.
*/
@VisibleForTesting
public static enum NameNodeDirType implements StorageDirType {
UNDEFINED,
IMAGE,
EDITS,
IMAGE_AND_EDITS;
@Override
public StorageDirType getStorageDirType() {
return this;
}
@Override
public boolean isOfType(StorageDirType type) {
if ((this == IMAGE_AND_EDITS) && (type == IMAGE || type == EDITS))
return true;
return this == type;
}
}
protected String blockpoolID = ""; // id of the block pool
/**
* flag that controls if we try to restore failed storages
*/
private boolean restoreFailedStorage = false;
private final Object restorationLock = new Object();
private boolean disablePreUpgradableLayoutCheck = false;
/**
* TxId of the last transaction that was included in the most
* recent fsimage file. This does not include any transactions
* that have since been written to the edit log.
*/
protected volatile long mostRecentCheckpointTxId = HdfsConstants.INVALID_TXID;
/**
* Time of the last checkpoint, in milliseconds since the epoch.
*/
private long mostRecentCheckpointTime = 0;
/**
* list of failed (and thus removed) storages
*/
final protected List<StorageDirectory> removedStorageDirs
= new CopyOnWriteArrayList<StorageDirectory>();
/**
* Properties from old layout versions that may be needed
* during upgrade only.
*/
private HashMap<String, String> deprecatedProperties;
/**
* Construct the NNStorage.
* @param conf Namenode configuration.
* @param imageDirs Directories the image can be stored in.
* @param editsDirs Directories the editlog can be stored in.
* @throws IOException if any directories are inaccessible.
*/
public NNStorage(Configuration conf,
Collection<URI> imageDirs, Collection<URI> editsDirs)
throws IOException {
super(NodeType.NAME_NODE);
storageDirs = new CopyOnWriteArrayList<StorageDirectory>();
// this may modify the editsDirs, so copy before passing in
setStorageDirectories(imageDirs,
Lists.newArrayList(editsDirs),
FSNamesystem.getSharedEditsDirs(conf));
}
@Override // Storage
public boolean isPreUpgradableLayout(StorageDirectory sd) throws IOException {
if (disablePreUpgradableLayoutCheck) {
return false;
}
File oldImageDir = new File(sd.getRoot(), "image");
if (!oldImageDir.exists()) {
return false;
}
// check the layout version inside the image file
File oldF = new File(oldImageDir, "fsimage");
RandomAccessFile oldFile = new RandomAccessFile(oldF, "rws");
try {
oldFile.seek(0);
int oldVersion = oldFile.readInt();
oldFile.close();
oldFile = null;
if (oldVersion < LAST_PRE_UPGRADE_LAYOUT_VERSION)
return false;
} finally {
IOUtils.cleanup(LOG, oldFile);
}
return true;
}
  /**
   * Release all directory locks and forget all storage directories.
   */
  @Override // Closeable
  public void close() throws IOException {
    unlockAll();
    storageDirs.clear();
  }
  /**
   * Set flag whether an attempt should be made to restore failed storage
   * directories at the next available opportunity.
   *
   * @param val Whether restoration attempt should be made.
   */
  void setRestoreFailedStorage(boolean val) {
    LOG.warn("set restore failed storage to " + val);
    restoreFailedStorage=val;
  }
  /**
   * @return Whether failed storage directories are to be restored.
   */
  boolean getRestoreFailedStorage() {
    return restoreFailedStorage;
  }
  /**
   * See if any of removed storages is "writable" again, and can be returned
   * into service.
   */
  void attemptRestoreRemovedStorage() {
    // if directory is "alive" - copy the images there...
    if(!restoreFailedStorage || removedStorageDirs.size() == 0)
      return; //nothing to restore
    /* We don't want more than one thread trying to restore at a time */
    synchronized (this.restorationLock) {
      LOG.info("NNStorage.attemptRestoreRemovedStorage: check removed(failed) "+
               "storarge. removedStorages size = " + removedStorageDirs.size());
      // removedStorageDirs is a CopyOnWriteArrayList, so removing while
      // iterating is safe: the iterator sees a snapshot.
      for(Iterator<StorageDirectory> it
            = this.removedStorageDirs.iterator(); it.hasNext();) {
        StorageDirectory sd = it.next();
        File root = sd.getRoot();
        LOG.info("currently disabled dir " + root.getAbsolutePath() +
                 "; type="+sd.getStorageDirType()
                 + ";canwrite="+FileUtil.canWrite(root));
        if(root.exists() && FileUtil.canWrite(root)) {
          LOG.info("restoring dir " + sd.getRoot().getAbsolutePath());
          this.addStorageDir(sd); // restore
          this.removedStorageDirs.remove(sd);
        }
      }
    }
  }
  /**
   * @return A list of storage directories which are in the errored state.
   *         NOTE(review): this exposes the internal live list, not a copy.
   */
  List<StorageDirectory> getRemovedStorageDirs() {
    return this.removedStorageDirs;
  }
  /**
   * See {@link NNStorage#setStorageDirectories(Collection, Collection, Collection)}
   */
  @VisibleForTesting
  synchronized void setStorageDirectories(Collection<URI> fsNameDirs,
                                          Collection<URI> fsEditsDirs)
      throws IOException {
    setStorageDirectories(fsNameDirs, fsEditsDirs, new ArrayList<URI>());
  }
  /**
   * Set the storage directories which will be used. This should only ever be
   * called from inside NNStorage. However, it needs to remain package private
   * for testing, as StorageDirectories need to be reinitialised after using
   * Mockito.spy() on this class, as Mockito doesn't work well with inner
   * classes, such as StorageDirectory in this case.
   *
   * Synchronized due to initialization of storageDirs and removedStorageDirs.
   *
   * NOTE: fsEditsDirs is mutated (entries shared with fsNameDirs are
   * removed), so callers must pass a mutable copy they own.
   *
   * @param fsNameDirs Locations to store images.
   * @param fsEditsDirs Locations to store edit logs.
   * @throws IOException
   */
  @VisibleForTesting
  synchronized void setStorageDirectories(Collection<URI> fsNameDirs,
                                          Collection<URI> fsEditsDirs,
                                          Collection<URI> sharedEditsDirs)
      throws IOException {
    this.storageDirs.clear();
    this.removedStorageDirs.clear();
    // Add all name dirs with appropriate NameNodeDirType
    for (URI dirName : fsNameDirs) {
      checkSchemeConsistency(dirName);
      boolean isAlsoEdits = false;
      for (URI editsDirName : fsEditsDirs) {
        if (editsDirName.compareTo(dirName) == 0) {
          isAlsoEdits = true;
          // Removal during iteration is safe only because we break
          // out of the loop immediately afterwards.
          fsEditsDirs.remove(editsDirName);
          break;
        }
      }
      NameNodeDirType dirType = (isAlsoEdits) ?
                          NameNodeDirType.IMAGE_AND_EDITS :
                          NameNodeDirType.IMAGE;
      // Add to the list of storage directories, only if the
      // URI is of type file://
      if(dirName.getScheme().compareTo("file") == 0) {
        this.addStorageDir(new StorageDirectory(new File(dirName.getPath()),
            dirType,
            sharedEditsDirs.contains(dirName))); // Don't lock the dir if it's shared.
      }
    }
    // Add edits dirs if they are different from name dirs
    for (URI dirName : fsEditsDirs) {
      checkSchemeConsistency(dirName);
      // Add to the list of storage directories, only if the
      // URI is of type file://
      if(dirName.getScheme().compareTo("file") == 0)
        this.addStorageDir(new StorageDirectory(new File(dirName.getPath()),
                    NameNodeDirType.EDITS, sharedEditsDirs.contains(dirName)));
    }
  }
  /**
   * Return the storage directory corresponding to the passed URI
   * @param uri URI of a storage directory
   * @return The matching storage directory or null if none found
   */
  StorageDirectory getStorageDirectory(URI uri) {
    try {
      // Normalize to a canonical file:// URI so differing textual forms
      // of the same path still compare equal.
      uri = Util.fileAsURI(new File(uri));
      Iterator<StorageDirectory> it = dirIterator();
      for (; it.hasNext(); ) {
        StorageDirectory sd = it.next();
        if (Util.fileAsURI(sd.getRoot()).equals(uri)) {
          return sd;
        }
      }
    } catch (IOException ioe) {
      // Best-effort lookup: conversion failures are logged and treated
      // as "not found" rather than propagated.
      LOG.warn("Error converting file to URI", ioe);
    }
    return null;
  }
  /**
   * Checks the consistency of a URI, in particular if the scheme
   * is specified
   * @param u URI whose consistency is being checked.
   * @throws IOException if the URI has no scheme
   */
  private static void checkSchemeConsistency(URI u) throws IOException {
    String scheme = u.getScheme();
    // the URI should have a proper scheme
    if(scheme == null) {
      throw new IOException("Undefined scheme for " + u);
    }
  }
  /**
   * Retrieve current directories of type IMAGE
   * @return Collection of URI representing image directories
   * @throws IOException in case of URI processing error
   */
  Collection<URI> getImageDirectories() throws IOException {
    return getDirectories(NameNodeDirType.IMAGE);
  }
  /**
   * Retrieve current directories of type EDITS
   * @return Collection of URI representing edits directories
   * @throws IOException in case of URI processing error
   */
  Collection<URI> getEditsDirectories() throws IOException {
    return getDirectories(NameNodeDirType.EDITS);
  }
  /**
   * Return number of storage directories of the given type.
   * @param dirType directory type; null counts all directories
   * @return number of storage directories of type dirType
   */
  int getNumStorageDirs(NameNodeDirType dirType) {
    if(dirType == null)
      return getNumStorageDirs();
    Iterator<StorageDirectory> it = dirIterator(dirType);
    int numDirs = 0;
    for(; it.hasNext(); it.next())
      numDirs++;
    return numDirs;
  }
  /**
   * Return the list of locations being used for a specific purpose.
   * i.e. Image or edit log storage.
   *
   * @param dirType Purpose of locations requested; null returns all.
   * @throws IOException if a directory root cannot be converted to a URI
   */
  Collection<URI> getDirectories(NameNodeDirType dirType)
      throws IOException {
    ArrayList<URI> list = new ArrayList<URI>();
    Iterator<StorageDirectory> it = (dirType == null) ? dirIterator() :
                                    dirIterator(dirType);
    for ( ;it.hasNext(); ) {
      StorageDirectory sd = it.next();
      try {
        list.add(Util.fileAsURI(sd.getRoot()));
      } catch (IOException e) {
        // Re-wrap to identify which directory failed.
        throw new IOException("Exception while processing " +
            "StorageDirectory " + sd.getRoot(), e);
      }
    }
    return list;
  }
  /**
   * Determine the last transaction ID noted in this storage directory.
   * This txid is stored in a special seen_txid file since it might not
   * correspond to the latest image or edit log. For example, an image-only
   * directory will have this txid incremented when edits logs roll, even
   * though the edits logs are in a different directory.
   *
   * @param sd StorageDirectory to check
   * @return If file exists and can be read, last recorded txid. If not, 0L.
   * @throws IOException On errors processing file pointed to by sd
   */
  static long readTransactionIdFile(StorageDirectory sd) throws IOException {
    File txidFile = getStorageFile(sd, NameNodeFile.SEEN_TXID);
    return PersistentLongFile.readFile(txidFile, 0);
  }
  /**
   * Write the last seen transaction ID into the seen_txid file of the
   * given storage directory.
   * @param sd storage directory
   * @param txid transaction ID to record; must be non-negative
   * @throws IOException on write failure
   */
  void writeTransactionIdFile(StorageDirectory sd, long txid) throws IOException {
    Preconditions.checkArgument(txid >= 0, "bad txid: " + txid);
    File txIdFile = getStorageFile(sd, NameNodeFile.SEEN_TXID);
    PersistentLongFile.writeFile(txIdFile, txid);
  }
  /**
   * Set the transaction ID and time of the last checkpoint
   *
   * @param txid transaction id of the last checkpoint
   * @param time time of the last checkpoint, in millis since the epoch
   */
  void setMostRecentCheckpointInfo(long txid, long time) {
    this.mostRecentCheckpointTxId = txid;
    this.mostRecentCheckpointTime = time;
  }
  /**
   * @return the transaction ID of the last checkpoint.
   */
  public long getMostRecentCheckpointTxId() {
    return mostRecentCheckpointTxId;
  }
  /**
   * @return the time of the most recent checkpoint in millis since the epoch.
   */
  long getMostRecentCheckpointTime() {
    return mostRecentCheckpointTime;
  }
  /**
   * Write a small file in all available storage directories that
   * indicates that the namespace has reached some given transaction ID.
   *
   * This is used when the image is loaded to avoid accidental rollbacks
   * in the case where an edit log is fully deleted but there is no
   * checkpoint. See TestNameEditsConfigs.testNameEditsConfigsFailure()
   * @param txid the txid that has been reached
   */
  public void writeTransactionIdFileToStorage(long txid) {
    // Write txid marker in all storage directories
    for (StorageDirectory sd : storageDirs) {
      try {
        writeTransactionIdFile(sd, txid);
      } catch(IOException e) {
        // A failing directory is removed from service rather than
        // failing the whole operation.
        // Close any edits stream associated with this dir and remove directory
        LOG.warn("writeTransactionIdToStorage failed on " + sd,
            e);
        reportErrorsOnDirectory(sd);
      }
    }
  }
  /**
   * Return the name of the image file that is uploaded by periodic
   * checkpointing
   *
   * @return List of filenames to save checkpoints to.
   */
  public File[] getFsImageNameCheckpoint(long txid) {
    ArrayList<File> list = new ArrayList<File>();
    for (Iterator<StorageDirectory> it =
                 dirIterator(NameNodeDirType.IMAGE); it.hasNext();) {
      list.add(getStorageFile(it.next(), NameNodeFile.IMAGE_NEW, txid));
    }
    return list.toArray(new File[list.size()]);
  }
  /**
   * @return The first image file with the given txid and image type,
   *         or null if no readable one exists.
   */
  public File getFsImageName(long txid, NameNodeFile nnf) {
    for (Iterator<StorageDirectory> it = dirIterator(NameNodeDirType.IMAGE);
        it.hasNext();) {
      StorageDirectory sd = it.next();
      File fsImage = getStorageFile(sd, nnf, txid);
      // Only return images in directories we can actually read.
      if (FileUtil.canRead(sd.getRoot()) && fsImage.exists()) {
        return fsImage;
      }
    }
    return null;
  }
  /**
   * @return The first image file whose txid is the same with the given txid and
   * image type is one of the given types, or null if none is found.
   */
  public File getFsImage(long txid, EnumSet<NameNodeFile> nnfs) {
    for (Iterator<StorageDirectory> it = dirIterator(NameNodeDirType.IMAGE);
        it.hasNext();) {
      StorageDirectory sd = it.next();
      for (NameNodeFile nnf : nnfs) {
        File fsImage = getStorageFile(sd, nnf, txid);
        if (FileUtil.canRead(sd.getRoot()) && fsImage.exists()) {
          return fsImage;
        }
      }
    }
    return null;
  }
  /** Convenience overload for the standard IMAGE file type. */
  public File getFsImageName(long txid) {
    return getFsImageName(txid, NameNodeFile.IMAGE);
  }
  /** @return the image file for the most recent checkpoint txid, or null. */
  public File getHighestFsImageName() {
    return getFsImageName(getMostRecentCheckpointTxId());
  }
  /** Create new dfs name directory. Caution: this destroys all files
   * in this filesystem. */
  private void format(StorageDirectory sd) throws IOException {
    sd.clearDirectory(); // create current dir
    writeProperties(sd);
    writeTransactionIdFile(sd, 0);
    LOG.info("Storage directory " + sd.getRoot()
             + " has been successfully formatted.");
  }
  /**
   * Format all available storage directories.
   * @param nsInfo namespace info to stamp into every directory's VERSION file
   * @throws IOException if formatting any directory fails
   */
  public void format(NamespaceInfo nsInfo) throws IOException {
    Preconditions.checkArgument(nsInfo.getLayoutVersion() == 0 ||
        nsInfo.getLayoutVersion() == HdfsConstants.NAMENODE_LAYOUT_VERSION,
        "Bad layout version: %s", nsInfo.getLayoutVersion());
    this.setStorageInfo(nsInfo);
    this.blockpoolID = nsInfo.getBlockPoolID();
    for (Iterator<StorageDirectory> it =
                   dirIterator(); it.hasNext();) {
      StorageDirectory sd = it.next();
      format(sd);
    }
  }
  /** @return a fresh NamespaceInfo with newly generated namespace/cluster/blockpool IDs. */
  public static NamespaceInfo newNamespaceInfo()
      throws UnknownHostException {
    return new NamespaceInfo(newNamespaceID(), newClusterID(),
        newBlockPoolID(), 0L);
  }
  /** Format all directories with the current layout version, keeping existing IDs. */
  public void format() throws IOException {
    this.layoutVersion = HdfsConstants.NAMENODE_LAYOUT_VERSION;
    for (Iterator<StorageDirectory> it =
                   dirIterator(); it.hasNext();) {
      StorageDirectory sd = it.next();
      format(sd);
    }
  }
/**
* Generate new namespaceID.
*
* namespaceID is a persistent attribute of the namespace.
* It is generated when the namenode is formatted and remains the same
* during the life cycle of the namenode.
* When a datanodes register they receive it as the registrationID,
* which is checked every time the datanode is communicating with the
* namenode. Datanodes that do not 'know' the namespaceID are rejected.
*
* @return new namespaceID
*/
private static int newNamespaceID() {
int newID = 0;
while(newID == 0)
newID = DFSUtil.getRandom().nextInt(0x7FFFFFFF); // use 31 bits only
return newID;
}
  /**
   * Populate this storage's fields from a parsed VERSION file, failing if
   * the directory is unformatted, and capture the block pool ID plus any
   * deprecated properties needed for upgrade.
   */
  @Override // Storage
  protected void setFieldsFromProperties(
      Properties props, StorageDirectory sd) throws IOException {
    super.setFieldsFromProperties(props, sd);
    // layoutVersion == 0 means the VERSION file carried no version,
    // i.e. the directory was never formatted.
    if (layoutVersion == 0) {
      throw new IOException("NameNode directory "
                            + sd.getRoot() + " is not formatted.");
    }
    // Set Block pool ID in version with federation support
    if (NameNodeLayoutVersion.supports(
        LayoutVersion.Feature.FEDERATION, getLayoutVersion())) {
      String sbpid = props.getProperty("blockpoolID");
      setBlockPoolID(sd.getRoot(), sbpid);
    }
    setDeprecatedPropertiesForUpgrade(props);
  }
  /**
   * Pull any properties out of the VERSION file that are from older
   * versions of HDFS and only necessary during upgrade.
   */
  private void setDeprecatedPropertiesForUpgrade(Properties props) {
    deprecatedProperties = new HashMap<String, String>();
    String md5 = props.getProperty(DEPRECATED_MESSAGE_DIGEST_PROPERTY);
    if (md5 != null) {
      deprecatedProperties.put(DEPRECATED_MESSAGE_DIGEST_PROPERTY, md5);
    }
  }
  /**
   * Return a property that was stored in an earlier version of HDFS.
   *
   * This should only be used during upgrades.
   */
  String getDeprecatedProperty(String prop) {
    assert getLayoutVersion() > HdfsConstants.NAMENODE_LAYOUT_VERSION :
      "getDeprecatedProperty should only be done when loading " +
      "storage from past versions during upgrade.";
    return deprecatedProperties.get(prop);
  }
  /**
   * Write version file into the storage directory.
   *
   * The version file should always be written last.
   * Missing or corrupted version file indicates that
   * the checkpoint is not valid.
   *
   * @param sd storage directory
   * @throws IOException
   */
  @Override // Storage
  protected void setPropertiesFromFields(Properties props,
                                         StorageDirectory sd
                                         ) throws IOException {
    super.setPropertiesFromFields(props, sd);
    // Set blockpoolID in version with federation support
    if (NameNodeLayoutVersion.supports(
        LayoutVersion.Feature.FEDERATION, getLayoutVersion())) {
      props.setProperty("blockpoolID", blockpoolID);
    }
  }
  /** @return file under sd's current dir named "<type>_<19-digit zero-padded txid>". */
  static File getStorageFile(StorageDirectory sd, NameNodeFile type, long imageTxId) {
    return new File(sd.getCurrentDir(),
                    String.format("%s_%019d", type.getName(), imageTxId));
  }
  /**
   * Get a storage file for one of the files that doesn't need a txid associated
   * (e.g version, seen_txid)
   */
  static File getStorageFile(StorageDirectory sd, NameNodeFile type) {
    return new File(sd.getCurrentDir(), type.getName());
  }
  @VisibleForTesting
  public static String getCheckpointImageFileName(long txid) {
    return getNameNodeFileName(NameNodeFile.IMAGE_NEW, txid);
  }
  @VisibleForTesting
  public static String getImageFileName(long txid) {
    return getNameNodeFileName(NameNodeFile.IMAGE, txid);
  }
  @VisibleForTesting
  public static String getRollbackImageFileName(long txid) {
    return getNameNodeFileName(NameNodeFile.IMAGE_ROLLBACK, txid);
  }
  public static String getLegacyOIVImageFileName(long txid) {
    return getNameNodeFileName(NameNodeFile.IMAGE_LEGACY_OIV, txid);
  }
  // Common "<name>_<19-digit txid>" naming used by all image/edits files.
  private static String getNameNodeFileName(NameNodeFile nnf, long txid) {
    return String.format("%s_%019d", nnf.getName(), txid);
  }
  @VisibleForTesting
  public static String getInProgressEditsFileName(long startTxId) {
    return getNameNodeFileName(NameNodeFile.EDITS_INPROGRESS, startTxId);
  }
  static File getInProgressEditsFile(StorageDirectory sd, long startTxId) {
    return new File(sd.getCurrentDir(), getInProgressEditsFileName(startTxId));
  }
  static File getFinalizedEditsFile(StorageDirectory sd,
      long startTxId, long endTxId) {
    return new File(sd.getCurrentDir(),
        getFinalizedEditsFileName(startTxId, endTxId));
  }
  static File getTemporaryEditsFile(StorageDirectory sd,
      long startTxId, long endTxId, long timestamp) {
    return new File(sd.getCurrentDir(),
        getTemporaryEditsFileName(startTxId, endTxId, timestamp));
  }
  static File getImageFile(StorageDirectory sd, NameNodeFile nnf, long txid) {
    return new File(sd.getCurrentDir(), getNameNodeFileName(nnf, txid));
  }
  @VisibleForTesting
  public static String getFinalizedEditsFileName(long startTxId, long endTxId) {
    return String.format("%s_%019d-%019d", NameNodeFile.EDITS.getName(),
                         startTxId, endTxId);
  }
  public static String getTemporaryEditsFileName(long startTxId, long endTxId,
      long timestamp) {
    return String.format("%s_%019d-%019d_%019d", NameNodeFile.EDITS_TMP.getName(),
        startTxId, endTxId, timestamp);
  }
  /**
   * Return the first readable finalized edits file for the given txid.
   * @throws IOException if no such file exists in any EDITS directory
   */
  File findFinalizedEditsFile(long startTxId, long endTxId)
      throws IOException {
    File ret = findFile(NameNodeDirType.EDITS,
        getFinalizedEditsFileName(startTxId, endTxId));
    if (ret == null) {
      throw new IOException(
          "No edits file for txid " + startTxId + "-" + endTxId + " exists!");
    }
    return ret;
  }
  /**
   * Return the first readable image file for the given txid and image type, or
   * null if no such image can be found
   */
  File findImageFile(NameNodeFile nnf, long txid) {
    return findFile(NameNodeDirType.IMAGE,
        getNameNodeFileName(nnf, txid));
  }
  /**
   * Return the first readable storage file of the given name
   * across any of the 'current' directories in SDs of the
   * given type, or null if no such file exists.
   */
  private File findFile(NameNodeDirType dirType, String name) {
    for (StorageDirectory sd : dirIterable(dirType)) {
      File candidate = new File(sd.getCurrentDir(), name);
      // Readability is checked on the directory, not the file itself.
      if (FileUtil.canRead(sd.getCurrentDir()) &&
          candidate.exists()) {
        return candidate;
      }
    }
    return null;
  }
  /**
   * Disable the check for pre-upgradable layouts. Needed for BackupImage.
   * @param val Whether to disable the preupgradeable layout check.
   */
  void setDisablePreUpgradableLayoutCheck(boolean val) {
    disablePreUpgradableLayoutCheck = val;
  }
  /**
   * Marks a list of directories as having experienced an error.
   *
   * @param sds A list of storage directories to mark as errored.
   */
  void reportErrorsOnDirectories(List<StorageDirectory> sds) {
    for (StorageDirectory sd : sds) {
      reportErrorsOnDirectory(sd);
    }
  }
  /**
   * Reports that a directory has experienced an error.
   * Notifies listeners that the directory is no longer
   * available.
   *
   * The directory is unlocked (best-effort) and moved from the active
   * storageDirs list to removedStorageDirs, from where it may later be
   * restored by attemptRestoreRemovedStorage().
   *
   * @param sd A storage directory to mark as errored.
   */
  private void reportErrorsOnDirectory(StorageDirectory sd) {
    LOG.error("Error reported on storage directory " + sd);
    String lsd = listStorageDirectories();
    LOG.debug("current list of storage dirs:" + lsd);
    LOG.warn("About to remove corresponding storage: "
             + sd.getRoot().getAbsolutePath());
    try {
      sd.unlock();
    } catch (Exception e) {
      // Failure to unlock is non-fatal; the dir is being removed anyway.
      LOG.warn("Unable to unlock bad storage directory: "
               +  sd.getRoot().getPath(), e);
    }
    // Only add to removedStorageDirs if it was actually still active,
    // so repeated error reports don't duplicate entries.
    if (this.storageDirs.remove(sd)) {
      this.removedStorageDirs.add(sd);
    }
    lsd = listStorageDirectories();
    LOG.debug("at the end current list of storage dirs:" + lsd);
  }
  /**
   * Processes the startup options for the clusterid and blockpoolid
   * for the upgrade.
   * @param startOpt Startup options
   * @param layoutVersion Layout version for the upgrade
   * @throws IOException
   */
  void processStartupOptionsForUpgrade(StartupOption startOpt, int layoutVersion)
      throws IOException {
    if (startOpt == StartupOption.UPGRADE || startOpt == StartupOption.UPGRADEONLY) {
      // If upgrade from a release that does not support federation,
      // if clusterId is provided in the startupOptions use it.
      // Else generate a new cluster ID
      if (!NameNodeLayoutVersion.supports(
          LayoutVersion.Feature.FEDERATION, layoutVersion)) {
        if (startOpt.getClusterId() == null) {
          startOpt.setClusterId(newClusterID());
        }
        setClusterID(startOpt.getClusterId());
        // Pre-federation layouts had no block pool, so mint one now.
        setBlockPoolID(newBlockPoolID());
      } else {
        // Upgrade from one version of federation to another supported
        // version of federation doesn't require clusterID.
        // Warn the user if the current clusterid didn't match with the input
        // clusterid.
        if (startOpt.getClusterId() != null
            && !startOpt.getClusterId().equals(getClusterID())) {
          LOG.warn("Clusterid mismatch - current clusterid: " + getClusterID()
              + ", Ignoring given clusterid: " + startOpt.getClusterId());
        }
      }
      LOG.info("Using clusterid: " + getClusterID());
    }
  }
  /**
   * Report that an IOE has occurred on some file which may
   * or may not be within one of the NN image storage directories.
   *
   * If the file lies under one of the active storage directories, that
   * directory is marked errored and removed from service; otherwise the
   * report is silently ignored.
   */
  @Override
  public void reportErrorOnFile(File f) {
    // We use getAbsolutePath here instead of getCanonicalPath since we know
    // that there is some IO problem on that drive.
    // getCanonicalPath may need to call stat() or readlink() and it's likely
    // those calls would fail due to the same underlying IO problem.
    String absPath = f.getAbsolutePath();
    for (StorageDirectory sd : storageDirs) {
      String dirPath = sd.getRoot().getAbsolutePath();
      // Ensure a trailing separator so "/foo/bar2" does not match "/foo/bar".
      if (!dirPath.endsWith(File.separator)) {
        dirPath += File.separator;
      }
      if (absPath.startsWith(dirPath)) {
        reportErrorsOnDirectory(sd);
        return;
      }
    }
  }
/**
* Generate new clusterID.
*
* clusterID is a persistent attribute of the cluster.
* It is generated when the cluster is created and remains the same
* during the life cycle of the cluster. When a new name node is formated, if
* this is a new cluster, a new clusterID is geneated and stored. Subsequent
* name node must be given the same ClusterID during its format to be in the
* same cluster.
* When a datanode register it receive the clusterID and stick with it.
* If at any point, name node or data node tries to join another cluster, it
* will be rejected.
*
* @return new clusterID
*/
public static String newClusterID() {
return "CID-" + UUID.randomUUID().toString();
}
  /** Set the cluster ID held by this storage (no validation performed). */
  void setClusterID(String cid) {
    clusterID = cid;
  }
  /**
   * try to find current cluster id in the VERSION files
   * returns first cluster id found in any VERSION file
   * null in case none found
   * @return clusterId or null in case no cluster id found
   */
  public String determineClusterId() {
    String cid = null;
    Iterator<StorageDirectory> sdit = dirIterator(NameNodeDirType.IMAGE);
    while(sdit.hasNext()) {
      StorageDirectory sd = sdit.next();
      try {
        Properties props = readPropertiesFile(sd.getVersionFile());
        cid = props.getProperty("clusterID");
        LOG.info("current cluster id for sd="+sd.getCurrentDir() +
            ";lv=" + layoutVersion + ";cid=" + cid);
        if(cid != null && !cid.equals(""))
          return cid;
      } catch (Exception e) {
        // Best-effort scan: an unreadable directory is logged and skipped.
        LOG.warn("this sd not available: " + e.getLocalizedMessage());
      } //ignore
    }
    LOG.warn("couldn't find any VERSION file containing valid ClusterId");
    return null;
  }
/**
* Generate new blockpoolID.
*
* @return new blockpoolID
*/
static String newBlockPoolID() throws UnknownHostException{
String ip = "unknownIP";
try {
ip = DNS.getDefaultIP("default");
} catch (UnknownHostException e) {
LOG.warn("Could not find ip address of \"default\" inteface.");
throw e;
}
int rand = DFSUtil.getSecureRandom().nextInt(Integer.MAX_VALUE);
String bpid = "BP-" + rand + "-"+ ip + "-" + Time.now();
return bpid;
}
/** Validate and set block pool ID */
void setBlockPoolID(String bpid) {
blockpoolID = bpid;
}
/** Validate and set block pool ID */
private void setBlockPoolID(File storage, String bpid)
throws InconsistentFSStateException {
if (bpid == null || bpid.equals("")) {
throw new InconsistentFSStateException(storage, "file "
+ Storage.STORAGE_FILE_VERSION + " has no block pool Id.");
}
if (!blockpoolID.equals("") && !blockpoolID.equals(bpid)) {
throw new InconsistentFSStateException(storage,
"Unexepcted blockpoolID " + bpid + " . Expected " + blockpoolID);
}
setBlockPoolID(bpid);
}
public String getBlockPoolID() {
return blockpoolID;
}
  /**
   * Iterate over all current storage directories, inspecting them
   * with the given inspector.
   */
  void inspectStorageDirs(FSImageStorageInspector inspector)
      throws IOException {
    // Process each of the storage directories to find the pair of
    // newest image file and edit file
    for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) {
      StorageDirectory sd = it.next();
      inspector.inspectDirectory(sd);
    }
  }
  /**
   * Iterate over all of the storage dirs, reading their contents to determine
   * their layout versions. Returns an FSImageStorageInspector which has
   * inspected each directory.
   *
   * <b>Note:</b> this can mutate the storage info fields (ctime, version, etc).
   * @throws IOException if no valid storage dirs are found or no valid layout version
   */
  FSImageStorageInspector readAndInspectDirs(EnumSet<NameNodeFile> fileTypes)
      throws IOException {
    Integer layoutVersion = null;
    boolean multipleLV = false;
    StringBuilder layoutVersions = new StringBuilder();
    // First determine what range of layout versions we're going to inspect
    for (Iterator<StorageDirectory> it = dirIterator(false);
         it.hasNext();) {
      StorageDirectory sd = it.next();
      if (!sd.getVersionFile().exists()) {
        FSImage.LOG.warn("Storage directory " + sd + " contains no VERSION file. Skipping...");
        continue;
      }
      readProperties(sd); // sets layoutVersion
      int lv = getLayoutVersion();
      // Track whether every formatted directory agrees on a single
      // layout version; mixed versions are a hard error below.
      if (layoutVersion == null) {
        layoutVersion = Integer.valueOf(lv);
      } else if (!layoutVersion.equals(lv)) {
        multipleLV = true;
      }
      layoutVersions.append("(").append(sd.getRoot()).append(", ").append(lv).append(") ");
    }
    if (layoutVersion == null) {
      throw new IOException("No storage directories contained VERSION information");
    }
    if (multipleLV) {
      throw new IOException(
          "Storage directories contain multiple layout versions: "
              + layoutVersions);
    }
    // If the storage directories are with the new layout version
    // (ie edits_<txnid>) then use the new inspector, which will ignore
    // the old format dirs.
    FSImageStorageInspector inspector;
    if (NameNodeLayoutVersion.supports(
        LayoutVersion.Feature.TXID_BASED_LAYOUT, getLayoutVersion())) {
      inspector = new FSImageTransactionalStorageInspector(fileTypes);
    } else {
      inspector = new FSImagePreTransactionalStorageInspector();
    }
    inspectStorageDirs(inspector);
    return inspector;
  }
  /**
   * @return a NamespaceInfo snapshot of this storage's current identity
   *         (namespace ID, cluster ID, block pool ID, creation time).
   */
  public NamespaceInfo getNamespaceInfo() {
    return new NamespaceInfo(
        getNamespaceID(),
        getClusterID(),
        getBlockPoolID(),
        getCTime());
  }
}
|
hibernate/hibernate-search | 33,895 | integrationtest/mapper/orm/src/test/java/org/hibernate/search/integrationtest/mapper/orm/automaticindexing/association/bytype/onetoone/ownedbycontaining/AutomaticIndexingOneToOneOwnedByContainingLazyOnContainingSideIT.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.search.integrationtest.mapper.orm.automaticindexing.association.bytype.onetoone.ownedbycontaining;
import static org.assertj.core.api.Assertions.assertThat;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;
import jakarta.persistence.Basic;
import jakarta.persistence.CollectionTable;
import jakarta.persistence.Column;
import jakarta.persistence.ElementCollection;
import jakarta.persistence.Embedded;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.OneToOne;
import jakarta.persistence.OrderColumn;
import jakarta.persistence.Transient;
import org.hibernate.annotations.LazyGroup;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.search.integrationtest.mapper.orm.automaticindexing.association.bytype.AbstractAutomaticIndexingSingleValuedAssociationBaseIT;
import org.hibernate.search.integrationtest.mapper.orm.automaticindexing.association.bytype.ContainerPrimitives;
import org.hibernate.search.integrationtest.mapper.orm.automaticindexing.association.bytype.accessor.MultiValuedPropertyAccessor;
import org.hibernate.search.integrationtest.mapper.orm.automaticindexing.association.bytype.accessor.PropertyAccessor;
import org.hibernate.search.mapper.pojo.automaticindexing.ReindexOnUpdate;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.AssociationInverseSide;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexedEmbedded;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexingDependency;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ObjectPath;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyValue;
import org.hibernate.search.util.impl.integrationtest.mapper.orm.OrmSetupHelper;
import org.hibernate.testing.bytecode.enhancement.EnhancementOptions;
import org.hibernate.testing.bytecode.enhancement.extension.BytecodeEnhanced;
import org.junit.jupiter.api.Test;
/**
* Test automatic indexing caused by single-valued association updates
* or by updates of associated (contained) entities,
* with a {@code @OneToOne} association owned by the containing side,
* and with lazy associations on the containing side.
*/
@BytecodeEnhanced // So that we can have lazy *ToOne associations
@EnhancementOptions(lazyLoading = true)
class AutomaticIndexingOneToOneOwnedByContainingLazyOnContainingSideIT
extends AbstractAutomaticIndexingSingleValuedAssociationBaseIT<
AutomaticIndexingOneToOneOwnedByContainingLazyOnContainingSideIT.IndexedEntity,
AutomaticIndexingOneToOneOwnedByContainingLazyOnContainingSideIT.ContainingEntity,
AutomaticIndexingOneToOneOwnedByContainingLazyOnContainingSideIT.ContainingEmbeddable,
AutomaticIndexingOneToOneOwnedByContainingLazyOnContainingSideIT.ContainedEntity,
AutomaticIndexingOneToOneOwnedByContainingLazyOnContainingSideIT.ContainedEmbeddable> {
/**
 * Wires this test's concrete entity model into the abstract base test
 * by handing over the per-entity generic accessors ("primitives").
 */
public AutomaticIndexingOneToOneOwnedByContainingLazyOnContainingSideIT() {
	super( IndexedEntity.PRIMITIVES, ContainingEntity.PRIMITIVES, ContainingEmbeddable.PRIMITIVES,
			ContainedEntity.PRIMITIVES, ContainedEmbeddable.PRIMITIVES );
}
// @OneToOne: the contained side references at most one containing entity.
@Override
protected boolean isAssociationMultiValuedOnContainedSide() {
	return false;
}
// The association under test is owned by the containing side (see class javadoc),
// i.e. mappedBy appears on the contained side.
@Override
protected boolean isAssociationOwnedByContainedSide() {
	return false;
}
// Containing-side associations are declared FetchType.LAZY with @LazyGroup (see ContainingEntity).
@Override
protected boolean isAssociationLazyOnContainingSide() {
	return true;
}
/**
 * Applies extra ORM settings required by this test's lazy-association setup,
 * then returns the (mutated) setup context.
 */
@Override
protected OrmSetupHelper.SetupContext additionalSetup(OrmSetupHelper.SetupContext setupContext) {
	// Avoid problems with deep chains of eager associations in ORM 6
	// See https://github.com/hibernate/hibernate-orm/blob/6.0/migration-guide.adoc#fetch-circularity-determination
	// See https://hibernate.zulipchat.com/#narrow/stream/132094-hibernate-orm-dev/topic/lazy.20associations.20with.20ORM.206
	setupContext.withProperty( AvailableSettings.MAX_FETCH_DEPTH, 1 );
	// Necessary for BytecodeEnhancerRunner, see BytecodeEnhancementIT.setup
	setupContext.withTcclLookupPrecedenceBefore();
	return setupContext;
}
/**
 * Sanity check: verifies bytecode enhancement was actually applied by looking for
 * the synthetic {@code $$_hibernate_*} methods that enhancement adds to entity classes.
 * Without enhancement, the lazy-on-containing-side behavior this class tests
 * would not actually be exercised.
 */
@Test
void testBytecodeEnhancementWorked() {
	assertThat( ContainingEntity.class.getDeclaredMethods() )
			.extracting( Method::getName )
			.anyMatch( name -> name.startsWith( "$$_hibernate_" ) );
}
/**
 * "Containing" side of the associations under test: owns every {@code @OneToOne}
 * to {@link ContainedEntity}. All *-to-one associations are {@code FetchType.LAZY},
 * each in its own {@code @LazyGroup}, so initializing one does not initialize the others.
 */
@Entity(name = "containing")
public static class ContainingEntity {

	@Id
	private Integer id;

	// Deliberately not @GenericField: updating this field alone must not trigger reindexing.
	private String nonIndexedField;

	@OneToOne
	private ContainingEntity parent;

	/*
	 * Inverse side of "parent". includePaths enumerates every indexed path reachable
	 * through a child, so updates deep in the containment chain propagate up to the
	 * indexed root entity.
	 */
	@OneToOne(mappedBy = "parent")
	@IndexedEmbedded(includePaths = {
			"containedIndexedEmbedded.indexedField",
			"containedIndexedEmbedded.indexedElementCollectionField",
			"containedIndexedEmbedded.containedDerivedField",
			"containedIndexedEmbeddedShallowReindexOnUpdate.indexedField",
			"containedIndexedEmbeddedShallowReindexOnUpdate.indexedElementCollectionField",
			"containedIndexedEmbeddedShallowReindexOnUpdate.containedDerivedField",
			"containedIndexedEmbeddedNoReindexOnUpdate.indexedField",
			"containedIndexedEmbeddedNoReindexOnUpdate.indexedElementCollectionField",
			"containedIndexedEmbeddedNoReindexOnUpdate.containedDerivedField",
			"containedIndexedEmbeddedWithCast.indexedField",
			"embeddedAssociations.containedIndexedEmbedded.indexedField",
			"embeddedAssociations.containedIndexedEmbedded.indexedElementCollectionField",
			"embeddedAssociations.containedIndexedEmbedded.containedDerivedField",
			"elementCollectionAssociations.containedIndexedEmbedded.indexedField",
			"elementCollectionAssociations.containedIndexedEmbedded.indexedElementCollectionField",
			"elementCollectionAssociations.containedIndexedEmbedded.containedDerivedField",
			"crossEntityDerivedField"
	})
	private ContainingEntity child;

	// Indexed-embedded association: updates to it, or to the contained entity's
	// included fields, must trigger reindexing of the root.
	@OneToOne(fetch = FetchType.LAZY)
	@LazyGroup("containedIndexedEmbedded")
	@IndexedEmbedded(includePaths = { "indexedField", "indexedElementCollectionField", "containedDerivedField" })
	private ContainedEntity containedIndexedEmbedded;

	// Same shape of association but not indexed-embedded: updates must NOT trigger reindexing.
	@OneToOne(fetch = FetchType.LAZY)
	@LazyGroup("containedNonIndexedEmbedded")
	private ContainedEntity containedNonIndexedEmbedded;

	// SHALLOW: reindex when the association itself changes, but not when the
	// contained entity's own properties change.
	@OneToOne(fetch = FetchType.LAZY)
	@LazyGroup("containedIndexedEmbeddedShallowReindexOnUpdate")
	@JoinColumn(name = "CIndexedEmbeddedSROU")
	@IndexedEmbedded(includePaths = { "indexedField", "indexedElementCollectionField", "containedDerivedField" })
	@IndexingDependency(reindexOnUpdate = ReindexOnUpdate.SHALLOW)
	private ContainedEntity containedIndexedEmbeddedShallowReindexOnUpdate;

	// NO: never reindex automatically because of this association.
	@OneToOne(fetch = FetchType.LAZY)
	@LazyGroup("containedIndexedEmbeddedNoReindexOnUpdate")
	@JoinColumn(name = "CIndexedEmbeddedNROU")
	@IndexedEmbedded(includePaths = { "indexedField", "indexedElementCollectionField", "containedDerivedField" })
	@IndexingDependency(reindexOnUpdate = ReindexOnUpdate.NO)
	private ContainedEntity containedIndexedEmbeddedNoReindexOnUpdate;

	// Only used through the derived property getCrossEntityDerivedField() below.
	@OneToOne(fetch = FetchType.LAZY)
	@LazyGroup("containedUsedInCrossEntityDerivedProperty")
	@JoinColumn(name = "CCrossEntityDerived")
	private ContainedEntity containedUsedInCrossEntityDerivedProperty;

	// Declared as Object on purpose: forces Hibernate Search to rely on the
	// explicit targetEntity/targetType cast.
	@OneToOne(targetEntity = ContainedEntity.class, fetch = FetchType.LAZY)
	@LazyGroup("containedIndexedEmbeddedWithCast")
	@JoinColumn(name = "CIndexedEmbeddedCast")
	@IndexedEmbedded(includePaths = { "indexedField" }, targetType = ContainedEntity.class)
	private Object containedIndexedEmbeddedWithCast;

	// Associations held indirectly, through an embeddable...
	@IndexedEmbedded
	@Embedded
	private ContainingEmbeddable embeddedAssociations;

	// ...and through an element collection of embeddables.
	@IndexedEmbedded
	@ElementCollection
	@LazyGroup("elementCollectionAssociations")
	@Embedded
	@OrderColumn(name = "idx")
	@CollectionTable(name = "i_ECAssoc")
	private List<ContainingEmbeddable> elementCollectionAssociations = new ArrayList<>();

	public Integer getId() {
		return id;
	}

	public void setId(Integer id) {
		this.id = id;
	}

	public String getNonIndexedField() {
		return nonIndexedField;
	}

	public void setNonIndexedField(String nonIndexedField) {
		this.nonIndexedField = nonIndexedField;
	}

	public ContainingEntity getParent() {
		return parent;
	}

	public void setParent(ContainingEntity parent) {
		this.parent = parent;
	}

	public ContainingEntity getChild() {
		return child;
	}

	public void setChild(ContainingEntity child) {
		this.child = child;
	}

	public ContainedEntity getContainedIndexedEmbedded() {
		return containedIndexedEmbedded;
	}

	public void setContainedIndexedEmbedded(ContainedEntity containedIndexedEmbedded) {
		this.containedIndexedEmbedded = containedIndexedEmbedded;
	}

	public ContainedEntity getContainedNonIndexedEmbedded() {
		return containedNonIndexedEmbedded;
	}

	public void setContainedNonIndexedEmbedded(ContainedEntity containedNonIndexedEmbedded) {
		this.containedNonIndexedEmbedded = containedNonIndexedEmbedded;
	}

	public ContainedEntity getContainedIndexedEmbeddedShallowReindexOnUpdate() {
		return containedIndexedEmbeddedShallowReindexOnUpdate;
	}

	public void setContainedIndexedEmbeddedShallowReindexOnUpdate(
			ContainedEntity containedIndexedEmbeddedShallowReindexOnUpdate) {
		this.containedIndexedEmbeddedShallowReindexOnUpdate = containedIndexedEmbeddedShallowReindexOnUpdate;
	}

	public ContainedEntity getContainedIndexedEmbeddedNoReindexOnUpdate() {
		return containedIndexedEmbeddedNoReindexOnUpdate;
	}

	public void setContainedIndexedEmbeddedNoReindexOnUpdate(
			ContainedEntity containedIndexedEmbeddedNoReindexOnUpdate) {
		this.containedIndexedEmbeddedNoReindexOnUpdate = containedIndexedEmbeddedNoReindexOnUpdate;
	}

	public ContainedEntity getContainedUsedInCrossEntityDerivedProperty() {
		return containedUsedInCrossEntityDerivedProperty;
	}

	public void setContainedUsedInCrossEntityDerivedProperty(
			ContainedEntity containedUsedInCrossEntityDerivedProperty) {
		this.containedUsedInCrossEntityDerivedProperty = containedUsedInCrossEntityDerivedProperty;
	}

	public Object getContainedIndexedEmbeddedWithCast() {
		return containedIndexedEmbeddedWithCast;
	}

	public void setContainedIndexedEmbeddedWithCast(Object containedIndexedEmbeddedWithCast) {
		this.containedIndexedEmbeddedWithCast = containedIndexedEmbeddedWithCast;
	}

	public ContainingEmbeddable getEmbeddedAssociations() {
		return embeddedAssociations;
	}

	public void setEmbeddedAssociations(ContainingEmbeddable embeddedAssociations) {
		this.embeddedAssociations = embeddedAssociations;
	}

	public List<ContainingEmbeddable> getElementCollectionAssociations() {
		return elementCollectionAssociations;
	}

	/**
	 * Derived field computed from two fields of an associated contained entity;
	 * the {@code @IndexingDependency(derivedFrom)} paths make updates to those
	 * fields trigger reindexing of this (containing) entity.
	 */
	@Transient
	@GenericField
	@IndexingDependency(derivedFrom = {
			@ObjectPath({
					@PropertyValue(propertyName = "containedUsedInCrossEntityDerivedProperty"),
					@PropertyValue(propertyName = "fieldUsedInCrossEntityDerivedField1")
			}),
			@ObjectPath({
					@PropertyValue(propertyName = "containedUsedInCrossEntityDerivedProperty"),
					@PropertyValue(propertyName = "fieldUsedInCrossEntityDerivedField2")
			})
	})
	public Optional<String> getCrossEntityDerivedField() {
		return containedUsedInCrossEntityDerivedProperty == null
				? Optional.empty()
				: computeDerived( Stream.of(
						containedUsedInCrossEntityDerivedProperty.getFieldUsedInCrossEntityDerivedField1(),
						containedUsedInCrossEntityDerivedProperty.getFieldUsedInCrossEntityDerivedField2()
				) );
	}

	// Generic accessors used by the abstract base test to manipulate this entity.
	static final ContainingEntityPrimitives<ContainingEntity, ContainingEmbeddable, ContainedEntity> PRIMITIVES =
			new ContainingEntityPrimitives<ContainingEntity, ContainingEmbeddable, ContainedEntity>() {
				@Override
				public Class<ContainingEntity> entityClass() {
					return ContainingEntity.class;
				}

				@Override
				public ContainingEntity newInstance(int id) {
					ContainingEntity entity = new ContainingEntity();
					entity.setId( id );
					return entity;
				}

				@Override
				public PropertyAccessor<ContainingEntity, ContainingEntity> child() {
					return PropertyAccessor.create( ContainingEntity::setChild );
				}

				@Override
				public PropertyAccessor<ContainingEntity, ContainingEntity> parent() {
					return PropertyAccessor.create( ContainingEntity::setParent );
				}

				@Override
				public PropertyAccessor<ContainingEntity, ContainedEntity> containedIndexedEmbedded() {
					return PropertyAccessor.create( ContainingEntity::setContainedIndexedEmbedded,
							ContainingEntity::getContainedIndexedEmbedded );
				}

				@Override
				public PropertyAccessor<ContainingEntity, ContainedEntity> containedNonIndexedEmbedded() {
					return PropertyAccessor.create( ContainingEntity::setContainedNonIndexedEmbedded,
							ContainingEntity::getContainedNonIndexedEmbedded );
				}

				@Override
				public PropertyAccessor<ContainingEntity,
						ContainedEntity> containedIndexedEmbeddedShallowReindexOnUpdate() {
					return PropertyAccessor.create( ContainingEntity::setContainedIndexedEmbeddedShallowReindexOnUpdate,
							ContainingEntity::getContainedIndexedEmbeddedShallowReindexOnUpdate );
				}

				@Override
				public PropertyAccessor<ContainingEntity, ContainedEntity> containedIndexedEmbeddedNoReindexOnUpdate() {
					return PropertyAccessor.create( ContainingEntity::setContainedIndexedEmbeddedNoReindexOnUpdate,
							ContainingEntity::getContainedIndexedEmbeddedNoReindexOnUpdate );
				}

				@Override
				public PropertyAccessor<ContainingEntity, ContainedEntity> containedUsedInCrossEntityDerivedProperty() {
					return PropertyAccessor.create( ContainingEntity::setContainedUsedInCrossEntityDerivedProperty,
							ContainingEntity::getContainedUsedInCrossEntityDerivedProperty );
				}

				@Override
				public PropertyAccessor<ContainingEntity, ContainedEntity> containedIndexedEmbeddedWithCast() {
					return PropertyAccessor.create( ContainingEntity::setContainedIndexedEmbeddedWithCast );
				}

				@Override
				public PropertyAccessor<ContainingEntity, ContainingEmbeddable> embeddedAssociations() {
					return PropertyAccessor.create( ContainingEntity::setEmbeddedAssociations,
							ContainingEntity::getEmbeddedAssociations );
				}

				@Override
				public MultiValuedPropertyAccessor<ContainingEntity,
						ContainingEmbeddable,
						List<ContainingEmbeddable>> elementCollectionAssociations() {
					return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
							ContainingEntity::getElementCollectionAssociations );
				}

				@Override
				public PropertyAccessor<ContainingEntity, String> nonIndexedField() {
					return PropertyAccessor.create( ContainingEntity::setNonIndexedField );
				}
			};
}
/**
 * Embeddable grouping associations held by {@link ContainingEntity},
 * used both as a directly embedded value and inside an element collection.
 */
public static class ContainingEmbeddable {

	// Index field name overridden to "containedIndexedEmbedded" so paths match
	// the direct association's paths.
	@OneToOne(fetch = FetchType.LAZY)
	@LazyGroup("embeddable_containedIndexedEmbedded")
	@JoinColumn(name = "CEmbIdxEmbedded")
	@IndexedEmbedded(includePaths = { "indexedField", "indexedElementCollectionField", "containedDerivedField" },
			name = "containedIndexedEmbedded")
	private ContainedEntity containedIndexedEmbedded;

	// Not indexed-embedded: updates must NOT trigger reindexing.
	@OneToOne(fetch = FetchType.LAZY)
	@LazyGroup("embeddable_containedNonIndexedEmbedded")
	@JoinColumn(name = "CEmbNonIdxEmbedded")
	private ContainedEntity containedNonIndexedEmbedded;

	public ContainedEntity getContainedIndexedEmbedded() {
		return containedIndexedEmbedded;
	}

	public void setContainedIndexedEmbedded(ContainedEntity containedIndexedEmbedded) {
		this.containedIndexedEmbedded = containedIndexedEmbedded;
	}

	public ContainedEntity getContainedNonIndexedEmbedded() {
		return containedNonIndexedEmbedded;
	}

	public void setContainedNonIndexedEmbedded(ContainedEntity containedNonIndexedEmbedded) {
		this.containedNonIndexedEmbedded = containedNonIndexedEmbedded;
	}

	// Generic accessors used by the abstract base test.
	static final ContainingEmbeddablePrimitives<ContainingEmbeddable, ContainedEntity> PRIMITIVES =
			new ContainingEmbeddablePrimitives<ContainingEmbeddable, ContainedEntity>() {
				@Override
				public ContainingEmbeddable newInstance() {
					return new ContainingEmbeddable();
				}

				@Override
				public PropertyAccessor<ContainingEmbeddable, ContainedEntity> containedIndexedEmbedded() {
					return PropertyAccessor.create( ContainingEmbeddable::setContainedIndexedEmbedded,
							ContainingEmbeddable::getContainedIndexedEmbedded
					);
				}

				@Override
				public PropertyAccessor<ContainingEmbeddable, ContainedEntity> containedNonIndexedEmbedded() {
					return PropertyAccessor.create( ContainingEmbeddable::setContainedNonIndexedEmbedded,
							ContainingEmbeddable::getContainedNonIndexedEmbedded
					);
				}
			};
}
/**
 * The indexed root entity. Inherits all associations from {@link ContainingEntity};
 * only instances of this subtype are mapped to the index.
 */
@Entity(name = "indexed")
@Indexed(index = IndexedEntity.INDEX)
public static class IndexedEntity extends ContainingEntity {

	static final String INDEX = "IndexedEntity";

	// Generic factory/metadata used by the abstract base test.
	static final IndexedEntityPrimitives<IndexedEntity> PRIMITIVES = new IndexedEntityPrimitives<IndexedEntity>() {
		@Override
		public Class<IndexedEntity> entityClass() {
			return IndexedEntity.class;
		}

		@Override
		public String indexName() {
			return IndexedEntity.INDEX;
		}

		@Override
		public IndexedEntity newInstance(int id) {
			IndexedEntity entity = new IndexedEntity();
			entity.setId( id );
			return entity;
		}
	};
}
/**
 * "Contained" side of the associations under test: holds only the inverse
 * ({@code mappedBy}) sides of the associations owned by {@link ContainingEntity},
 * plus the fields that get indexed-embedded into the containing entity.
 */
@Entity(name = "contained")
public static class ContainedEntity {

	@Id
	private Integer id;

	// Inverse sides of the @OneToOne associations owned by ContainingEntity.
	@OneToOne(mappedBy = "containedIndexedEmbedded")
	private ContainingEntity containingAsIndexedEmbedded;

	@OneToOne(mappedBy = "containedNonIndexedEmbedded")
	private ContainingEntity containingAsNonIndexedEmbedded;

	@OneToOne(mappedBy = "containedIndexedEmbeddedShallowReindexOnUpdate")
	private ContainingEntity containingAsIndexedEmbeddedShallowReindexOnUpdate;

	@OneToOne(mappedBy = "containedIndexedEmbeddedNoReindexOnUpdate")
	private ContainingEntity containingAsIndexedEmbeddedNoReindexOnUpdate;

	@OneToOne(mappedBy = "containedUsedInCrossEntityDerivedProperty")
	private ContainingEntity containingAsUsedInCrossEntityDerivedProperty;

	// The owning side declares this association as Object, hence targetEntity here.
	@OneToOne(mappedBy = "containedIndexedEmbeddedWithCast", targetEntity = ContainingEntity.class)
	private Object containingAsIndexedEmbeddedWithCast;

	@Embedded
	private ContainedEmbeddable embeddedAssociations;

	/*
	 * No mappedBy here. The inverse side of associations within an element collection cannot use mappedBy.
	 * If they do, Hibernate ORM will fail (throw an exception) while attempting to walk down the mappedBy path,
	 * because it assumes the prefix of that path is an embeddable,
	 * and in this case it is a List.
	 * TODO use mappedBy when the above gets fixed in Hibernate ORM
	 */
	@ManyToOne
	@JoinColumn(name = "CECAssocIdxEmb")
	@AssociationInverseSide(inversePath = @ObjectPath({
			@PropertyValue(propertyName = "elementCollectionAssociations"),
			@PropertyValue(propertyName = "containedIndexedEmbedded")
	}))
	private ContainingEntity containingAsElementCollectionAssociationsIndexedEmbedded;

	/*
	 * No mappedBy here. Same reason as just above.
	 * TODO use mappedBy when the above gets fixed in Hibernate ORM
	 */
	@ManyToOne
	@JoinColumn(name = "CECAssocNonIdxEmb")
	@AssociationInverseSide(inversePath = @ObjectPath({
			@PropertyValue(propertyName = "elementCollectionAssociations"),
			@PropertyValue(propertyName = "containedNonIndexedEmbedded")
	}))
	private ContainingEntity containingAsElementCollectionAssociationsNonIndexedEmbedded;

	// Included in the containing side's @IndexedEmbedded includePaths.
	@Basic
	@GenericField
	private String indexedField;

	@ElementCollection
	@OrderColumn(name = "idx")
	@CollectionTable(name = "indexedECF")
	@GenericField
	private List<String> indexedElementCollectionField = new ArrayList<>();

	@Basic
	@GenericField
	// Keep this annotation, it should be ignored because the field is not included in the @IndexedEmbedded
	private String nonIndexedField;

	@ElementCollection
	@OrderColumn(name = "idx")
	@CollectionTable(name = "nonIndexedECF")
	@Column(name = "nonIndexedECF")
	@GenericField
	// Keep this annotation, it should be ignored because the field is not included in the @IndexedEmbedded
	private List<String> nonIndexedElementCollectionField = new ArrayList<>();

	// Sources for getContainedDerivedField() below.
	@Basic // Do not annotate with @GenericField, this would make the test pointless
	@Column(name = "FUIContainedDF1")
	private String fieldUsedInContainedDerivedField1;

	@Basic // Do not annotate with @GenericField, this would make the test pointless
	@Column(name = "FUIContainedDF2")
	private String fieldUsedInContainedDerivedField2;

	// Sources for ContainingEntity.getCrossEntityDerivedField().
	@Basic // Do not annotate with @GenericField, this would make the test pointless
	@Column(name = "FUICrossEntityDF1")
	private String fieldUsedInCrossEntityDerivedField1;

	@Basic // Do not annotate with @GenericField, this would make the test pointless
	@Column(name = "FUICrossEntityDF2")
	private String fieldUsedInCrossEntityDerivedField2;

	public Integer getId() {
		return id;
	}

	public void setId(Integer id) {
		this.id = id;
	}

	public ContainingEntity getContainingAsIndexedEmbedded() {
		return containingAsIndexedEmbedded;
	}

	public void setContainingAsIndexedEmbedded(ContainingEntity containingAsIndexedEmbedded) {
		this.containingAsIndexedEmbedded = containingAsIndexedEmbedded;
	}

	public ContainingEntity getContainingAsNonIndexedEmbedded() {
		return containingAsNonIndexedEmbedded;
	}

	public void setContainingAsNonIndexedEmbedded(ContainingEntity containingAsNonIndexedEmbedded) {
		this.containingAsNonIndexedEmbedded = containingAsNonIndexedEmbedded;
	}

	public ContainingEntity getContainingAsIndexedEmbeddedShallowReindexOnUpdate() {
		return containingAsIndexedEmbeddedShallowReindexOnUpdate;
	}

	public void setContainingAsIndexedEmbeddedShallowReindexOnUpdate(
			ContainingEntity containingAsIndexedEmbeddedShallowReindexOnUpdate) {
		this.containingAsIndexedEmbeddedShallowReindexOnUpdate = containingAsIndexedEmbeddedShallowReindexOnUpdate;
	}

	public ContainingEntity getContainingAsIndexedEmbeddedNoReindexOnUpdate() {
		return containingAsIndexedEmbeddedNoReindexOnUpdate;
	}

	public void setContainingAsIndexedEmbeddedNoReindexOnUpdate(
			ContainingEntity containingAsIndexedEmbeddedNoReindexOnUpdate) {
		this.containingAsIndexedEmbeddedNoReindexOnUpdate = containingAsIndexedEmbeddedNoReindexOnUpdate;
	}

	public ContainingEntity getContainingAsUsedInCrossEntityDerivedProperty() {
		return containingAsUsedInCrossEntityDerivedProperty;
	}

	public void setContainingAsUsedInCrossEntityDerivedProperty(
			ContainingEntity containingAsUsedInCrossEntityDerivedProperty) {
		this.containingAsUsedInCrossEntityDerivedProperty = containingAsUsedInCrossEntityDerivedProperty;
	}

	public Object getContainingAsIndexedEmbeddedWithCast() {
		return containingAsIndexedEmbeddedWithCast;
	}

	public void setContainingAsIndexedEmbeddedWithCast(Object containingAsIndexedEmbeddedWithCast) {
		this.containingAsIndexedEmbeddedWithCast = containingAsIndexedEmbeddedWithCast;
	}

	public ContainedEmbeddable getEmbeddedAssociations() {
		return embeddedAssociations;
	}

	public void setEmbeddedAssociations(ContainedEmbeddable embeddedAssociations) {
		this.embeddedAssociations = embeddedAssociations;
	}

	public ContainingEntity getContainingAsElementCollectionAssociationsIndexedEmbedded() {
		return containingAsElementCollectionAssociationsIndexedEmbedded;
	}

	public void setContainingAsElementCollectionAssociationsIndexedEmbedded(
			ContainingEntity containingAsElementCollectionAssociationsIndexedEmbedded) {
		this.containingAsElementCollectionAssociationsIndexedEmbedded =
				containingAsElementCollectionAssociationsIndexedEmbedded;
	}

	public ContainingEntity getContainingAsElementCollectionAssociationsNonIndexedEmbedded() {
		return containingAsElementCollectionAssociationsNonIndexedEmbedded;
	}

	public void setContainingAsElementCollectionAssociationsNonIndexedEmbedded(
			ContainingEntity containingAsElementCollectionAssociationsNonIndexedEmbedded) {
		this.containingAsElementCollectionAssociationsNonIndexedEmbedded =
				containingAsElementCollectionAssociationsNonIndexedEmbedded;
	}

	public String getIndexedField() {
		return indexedField;
	}

	public void setIndexedField(String indexedField) {
		this.indexedField = indexedField;
	}

	public List<String> getIndexedElementCollectionField() {
		return indexedElementCollectionField;
	}

	public void setIndexedElementCollectionField(List<String> indexedElementCollectionField) {
		this.indexedElementCollectionField = indexedElementCollectionField;
	}

	public String getNonIndexedField() {
		return nonIndexedField;
	}

	public void setNonIndexedField(String nonIndexedField) {
		this.nonIndexedField = nonIndexedField;
	}

	public List<String> getNonIndexedElementCollectionField() {
		return nonIndexedElementCollectionField;
	}

	public void setNonIndexedElementCollectionField(List<String> nonIndexedElementCollectionField) {
		this.nonIndexedElementCollectionField = nonIndexedElementCollectionField;
	}

	public String getFieldUsedInContainedDerivedField1() {
		return fieldUsedInContainedDerivedField1;
	}

	public void setFieldUsedInContainedDerivedField1(String fieldUsedInContainedDerivedField1) {
		this.fieldUsedInContainedDerivedField1 = fieldUsedInContainedDerivedField1;
	}

	public String getFieldUsedInContainedDerivedField2() {
		return fieldUsedInContainedDerivedField2;
	}

	public void setFieldUsedInContainedDerivedField2(String fieldUsedInContainedDerivedField2) {
		this.fieldUsedInContainedDerivedField2 = fieldUsedInContainedDerivedField2;
	}

	public String getFieldUsedInCrossEntityDerivedField1() {
		return fieldUsedInCrossEntityDerivedField1;
	}

	public void setFieldUsedInCrossEntityDerivedField1(String fieldUsedInCrossEntityDerivedField1) {
		this.fieldUsedInCrossEntityDerivedField1 = fieldUsedInCrossEntityDerivedField1;
	}

	public String getFieldUsedInCrossEntityDerivedField2() {
		return fieldUsedInCrossEntityDerivedField2;
	}

	public void setFieldUsedInCrossEntityDerivedField2(String fieldUsedInCrossEntityDerivedField2) {
		this.fieldUsedInCrossEntityDerivedField2 = fieldUsedInCrossEntityDerivedField2;
	}

	/**
	 * Derived field computed from the two {@code fieldUsedInContainedDerivedField*} values;
	 * the {@code @IndexingDependency(derivedFrom)} paths make updates to those fields
	 * trigger reindexing wherever this field is indexed-embedded.
	 */
	@Transient
	@GenericField
	@IndexingDependency(derivedFrom = {
			@ObjectPath(@PropertyValue(propertyName = "fieldUsedInContainedDerivedField1")),
			@ObjectPath(@PropertyValue(propertyName = "fieldUsedInContainedDerivedField2"))
	})
	public Optional<String> getContainedDerivedField() {
		return computeDerived( Stream.of( fieldUsedInContainedDerivedField1, fieldUsedInContainedDerivedField2 ) );
	}

	// Generic accessors used by the abstract base test to manipulate this entity.
	static ContainedEntityPrimitives<ContainedEntity, ContainedEmbeddable, ContainingEntity> PRIMITIVES =
			new ContainedEntityPrimitives<ContainedEntity, ContainedEmbeddable, ContainingEntity>() {
				@Override
				public Class<ContainedEntity> entityClass() {
					return ContainedEntity.class;
				}

				@Override
				public ContainedEntity newInstance(int id) {
					ContainedEntity entity = new ContainedEntity();
					entity.setId( id );
					return entity;
				}

				@Override
				public PropertyAccessor<ContainedEntity, ContainingEntity> containingAsIndexedEmbedded() {
					return PropertyAccessor.create( ContainedEntity::setContainingAsIndexedEmbedded,
							ContainedEntity::getContainingAsIndexedEmbedded );
				}

				@Override
				public PropertyAccessor<ContainedEntity, ContainingEntity> containingAsNonIndexedEmbedded() {
					return PropertyAccessor.create( ContainedEntity::setContainingAsNonIndexedEmbedded,
							ContainedEntity::getContainingAsNonIndexedEmbedded );
				}

				@Override
				public PropertyAccessor<ContainedEntity,
						ContainingEntity> containingAsIndexedEmbeddedShallowReindexOnUpdate() {
					return PropertyAccessor.create( ContainedEntity::setContainingAsIndexedEmbeddedShallowReindexOnUpdate,
							ContainedEntity::getContainingAsIndexedEmbeddedShallowReindexOnUpdate );
				}

				@Override
				public PropertyAccessor<ContainedEntity, ContainingEntity> containingAsIndexedEmbeddedNoReindexOnUpdate() {
					return PropertyAccessor.create( ContainedEntity::setContainingAsIndexedEmbeddedNoReindexOnUpdate,
							ContainedEntity::getContainingAsIndexedEmbeddedNoReindexOnUpdate );
				}

				@Override
				public PropertyAccessor<ContainedEntity, ContainingEntity> containingAsUsedInCrossEntityDerivedProperty() {
					return PropertyAccessor.create( ContainedEntity::setContainingAsUsedInCrossEntityDerivedProperty,
							ContainedEntity::getContainingAsUsedInCrossEntityDerivedProperty );
				}

				@Override
				public PropertyAccessor<ContainedEntity, ContainingEntity> containingAsIndexedEmbeddedWithCast() {
					return PropertyAccessor.create( ContainedEntity::setContainingAsIndexedEmbeddedWithCast );
				}

				@Override
				public PropertyAccessor<ContainedEntity, ContainedEmbeddable> embeddedAssociations() {
					return PropertyAccessor.create( ContainedEntity::setEmbeddedAssociations,
							ContainedEntity::getEmbeddedAssociations );
				}

				@Override
				public PropertyAccessor<ContainedEntity,
						ContainingEntity> containingAsElementCollectionAssociationsIndexedEmbedded() {
					return PropertyAccessor.create(
							ContainedEntity::setContainingAsElementCollectionAssociationsIndexedEmbedded,
							ContainedEntity::getContainingAsElementCollectionAssociationsIndexedEmbedded );
				}

				@Override
				public PropertyAccessor<ContainedEntity,
						ContainingEntity> containingAsElementCollectionAssociationsNonIndexedEmbedded() {
					return PropertyAccessor.create(
							ContainedEntity::setContainingAsElementCollectionAssociationsNonIndexedEmbedded,
							ContainedEntity::getContainingAsElementCollectionAssociationsNonIndexedEmbedded );
				}

				@Override
				public PropertyAccessor<ContainedEntity, String> indexedField() {
					return PropertyAccessor.create( ContainedEntity::setIndexedField );
				}

				@Override
				public PropertyAccessor<ContainedEntity, String> nonIndexedField() {
					return PropertyAccessor.create( ContainedEntity::setNonIndexedField );
				}

				@Override
				public MultiValuedPropertyAccessor<ContainedEntity, String, List<String>> indexedElementCollectionField() {
					return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
							ContainedEntity::getIndexedElementCollectionField,
							ContainedEntity::setIndexedElementCollectionField );
				}

				@Override
				public MultiValuedPropertyAccessor<ContainedEntity,
						String,
						List<String>> nonIndexedElementCollectionField() {
					return MultiValuedPropertyAccessor.create( ContainerPrimitives.collection(),
							ContainedEntity::getNonIndexedElementCollectionField,
							ContainedEntity::setNonIndexedElementCollectionField );
				}

				@Override
				public PropertyAccessor<ContainedEntity, String> fieldUsedInContainedDerivedField1() {
					return PropertyAccessor.create( ContainedEntity::setFieldUsedInContainedDerivedField1 );
				}

				@Override
				public PropertyAccessor<ContainedEntity, String> fieldUsedInContainedDerivedField2() {
					return PropertyAccessor.create( ContainedEntity::setFieldUsedInContainedDerivedField2 );
				}

				@Override
				public PropertyAccessor<ContainedEntity, String> fieldUsedInCrossEntityDerivedField1() {
					return PropertyAccessor.create( ContainedEntity::setFieldUsedInCrossEntityDerivedField1 );
				}

				@Override
				public PropertyAccessor<ContainedEntity, String> fieldUsedInCrossEntityDerivedField2() {
					return PropertyAccessor.create( ContainedEntity::setFieldUsedInCrossEntityDerivedField2 );
				}
			};
}
/**
 * Embeddable holding the inverse sides of the associations declared in
 * {@link ContainingEmbeddable}; {@code mappedBy} walks through the embedded path.
 */
public static class ContainedEmbeddable {

	@OneToOne(mappedBy = "embeddedAssociations.containedIndexedEmbedded")
	private ContainingEntity containingAsIndexedEmbedded;

	@OneToOne(mappedBy = "embeddedAssociations.containedNonIndexedEmbedded")
	private ContainingEntity containingAsNonIndexedEmbedded;

	public ContainingEntity getContainingAsIndexedEmbedded() {
		return containingAsIndexedEmbedded;
	}

	public void setContainingAsIndexedEmbedded(ContainingEntity containingAsIndexedEmbedded) {
		this.containingAsIndexedEmbedded = containingAsIndexedEmbedded;
	}

	public ContainingEntity getContainingAsNonIndexedEmbedded() {
		return containingAsNonIndexedEmbedded;
	}

	public void setContainingAsNonIndexedEmbedded(ContainingEntity containingAsNonIndexedEmbedded) {
		this.containingAsNonIndexedEmbedded = containingAsNonIndexedEmbedded;
	}

	// Generic accessors used by the abstract base test.
	static ContainedEmbeddablePrimitives<ContainedEmbeddable, ContainingEntity> PRIMITIVES =
			new ContainedEmbeddablePrimitives<ContainedEmbeddable, ContainingEntity>() {
				@Override
				public ContainedEmbeddable newInstance() {
					return new ContainedEmbeddable();
				}

				@Override
				public PropertyAccessor<ContainedEmbeddable, ContainingEntity> containingAsIndexedEmbedded() {
					return PropertyAccessor.create( ContainedEmbeddable::setContainingAsIndexedEmbedded,
							ContainedEmbeddable::getContainingAsIndexedEmbedded
					);
				}

				@Override
				public PropertyAccessor<ContainedEmbeddable, ContainingEntity> containingAsNonIndexedEmbedded() {
					// Fixed copy-paste bug: this accessor previously referenced the
					// *indexed* property (setContainingAsIndexedEmbedded/getContainingAsIndexedEmbedded),
					// so base-test scenarios for the non-indexed embedded association silently
					// manipulated the wrong field. Now mirrors ContainingEmbeddable.PRIMITIVES
					// and ContainedEntity.PRIMITIVES, which reference the matching property.
					return PropertyAccessor.create( ContainedEmbeddable::setContainingAsNonIndexedEmbedded,
							ContainedEmbeddable::getContainingAsNonIndexedEmbedded
					);
				}
			};
}
}
|
googleapis/google-cloud-java | 35,082 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ModelGardenServiceProto.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/model_garden_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
public final class ModelGardenServiceProto {
// Static holder of descriptors/accessor tables; never instantiated.
private ModelGardenServiceProto() {}
// No extensions are defined in model_garden_service.proto, so there is nothing to register.
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
// Full-registry overload; delegates to the lite variant (which is a no-op here).
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
	registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
}
// Descriptor / FieldAccessorTable pair for each message type in
// model_garden_service.proto, used by the generated message classes for reflection.
// NOTE(review): presumably assigned in the generated static descriptor
// initializer, which is outside this chunk — confirm against the full file.
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_GetPublisherModelRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_GetPublisherModelRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_ListPublisherModelsRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_ListPublisherModelsRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_ListPublisherModelsResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_ListPublisherModelsResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_CustomModel_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_CustomModel_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_ModelConfig_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_ModelConfig_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_EndpointConfig_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_EndpointConfig_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_DeployConfig_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_DeployConfig_fieldAccessorTable;
// Map entry message generated for the DeployConfig.system_labels map field.
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_DeployConfig_SystemLabelsEntry_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_DeployConfig_SystemLabelsEntry_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_DeployResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_DeployResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_DeployOperationMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_DeployOperationMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelOperationMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelOperationMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
		internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
		internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelOperationMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelOperationMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_CheckPublisherModelEulaAcceptanceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_CheckPublisherModelEulaAcceptanceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_AcceptPublisherModelEulaRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_AcceptPublisherModelEulaRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_PublisherModelEulaAcceptance_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_PublisherModelEulaAcceptance_fieldAccessorTable;
  /**
   * Returns the file descriptor for {@code
   * google/cloud/aiplatform/v1beta1/model_garden_service.proto}, as built once by the static
   * initializer below.
   */
  public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
    return descriptor;
  }
private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
  static {
    // One-time initialization: parse the serialized file descriptor, link it against its
    // dependency descriptors, build the reflective field-accessor table for every message
    // type, and finally re-parse custom options against an extension registry.
    //
    // Serialized FileDescriptorProto of model_garden_service.proto, split into string
    // chunks (presumably to stay within JVM string-constant size limits — generated code).
    java.lang.String[] descriptorData = {
      "\n"
          + ":google/cloud/aiplatform/v1beta1/model_garden_service.proto\022\037google.cloud.aipla"
          + "tform.v1beta1\032\034google/api/annotations.pr"
          + "oto\032\027google/api/client.proto\032\037google/api"
          + "/field_behavior.proto\032\031google/api/resour"
          + "ce.proto\032(google/cloud/aiplatform/v1beta1/io.proto\0327google/cloud/aiplatform/v1be"
          + "ta1/machine_resources.proto\032+google/cloud/aiplatform/v1beta1/model.proto\032/google"
          + "/cloud/aiplatform/v1beta1/operation.proto\0325google/cloud/aiplatform/v1beta1/publi"
          + "sher_model.proto\0328google/cloud/aiplatfor"
          + "m/v1beta1/service_networking.proto\032#google/longrunning/operations.proto\"\312\002\n"
          + "\030GetPublisherModelRequest\022>\n"
          + "\004name\030\001 \001(\tB0\340A\002\372A*\n"
          + "(aiplatform.googleapis.com/PublisherModel\022\032\n\r"
          + "language_code\030\002 \001(\tB\003\340A\001\022F\n"
          + "\004view\030\003"
          + " \001(\01623.google.cloud.aiplatform.v1beta1.PublisherModelViewB\003\340A\001\022\"\n"
          + "\025is_hugging_face_model\030\005 \001(\010B\003\340A\001\022\037\n"
          + "\022hugging_face_token\030\006 \001(\tB\003\340A\001\022E\n"
          + "8include_equivalent_model_garden_model_deployment_configs\030\007"
          + " \001(\010B\003\340A\001\"\222\002\n"
          + "\032ListPublisherModelsRequest\022\023\n"
          + "\006parent\030\001 \001(\tB\003\340A\002\022\023\n"
          + "\006filter\030\002 \001(\tB\003\340A\001\022\026\n"
          + "\tpage_size\030\003 \001(\005B\003\340A\001\022\027\n\n"
          + "page_token\030\004 \001(\tB\003\340A\001\022F\n"
          + "\004view\030\005 \001(\01623.google.cloud.aipl"
          + "atform.v1beta1.PublisherModelViewB\003\340A\001\022\025\n"
          + "\010order_by\030\006 \001(\tB\003\340A\001\022\032\n\r"
          + "language_code\030\007 \001(\tB\003\340A\001\022\036\n"
          + "\021list_all_versions\030\010 \001(\010B\003\340A\001\"\201\001\n"
          + "\033ListPublisherModelsResponse\022I\n"
          + "\020publisher_models\030\001"
          + " \003(\0132/.google.cloud.aiplatform.v1beta1.PublisherModel\022\027\n"
          + "\017next_page_token\030\002 \001(\t\"\261\013\n\r"
          + "DeployRequest\022M\n"
          + "\024publisher_model_name\030\001 \001(\tB-\372A*\n"
          + "(aiplatform.googleapis.com/PublisherModelH\000\022\037\n"
          + "\025hugging_face_model_id\030\002 \001(\tH\000\022R\n"
          + "\014custom_model\030\003"
          + " \001(\0132:.google.cloud.aiplatform.v1beta1.DeployRequest.CustomModelH\000\022>\n"
          + "\013destination\030\004 \001(\tB)\340A\002\372A#\n"
          + "!locations.googleapis.com/Location\022U\n"
          + "\014model_config\030\005 \001(\0132:.goog"
          + "le.cloud.aiplatform.v1beta1.DeployRequest.ModelConfigB\003\340A\001\022[\n"
          + "\017endpoint_config\030\006 "
          + "\001(\0132=.google.cloud.aiplatform.v1beta1.DeployRequest.EndpointConfigB\003\340A\001\022W\n\r"
          + "deploy_config\030\007 \001(\0132;.google.cloud.aiplatform"
          + ".v1beta1.DeployRequest.DeployConfigB\003\340A\001\0325\n"
          + "\013CustomModel\022\026\n"
          + "\007gcs_uri\030\002 \001(\tB\003\340A\005H\000B\016\n"
          + "\014model_source\032\207\002\n"
          + "\013ModelConfig\022\030\n"
          + "\013accept_eula\030\001 \001(\010B\003\340A\001\022&\n"
          + "\031hugging_face_access_token\030\002 \001(\tB\003\340A\001\022\'\n"
          + "\032hugging_face_cache_enabled\030\003 \001(\010B\003\340A\001\022\037\n"
          + "\022model_display_name\030\004 \001(\tB\003\340A\001\022P\n"
          + "\016container_spec\030\005 \001(\01323.go"
          + "ogle.cloud.aiplatform.v1beta1.ModelContainerSpecB\003\340A\001\022\032\n\r"
          + "model_user_id\030\006 \001(\tB\003\340A\001\032\226\002\n"
          + "\016EndpointConfig\022\"\n"
          + "\025endpoint_display_name\030\001 \001(\tB\003\340A\001\022)\n"
          + "\032dedicated_endpoint_enabled\030\002 \001(\010B\005\030\001\340A\001\022(\n"
          + "\033dedicated_endpoint_disabled\030\004 \001(\010B\003\340A\001\022i\n"
          + "\036private_service_connect_config\030\005 \001(\0132<.google.cloud.aip"
          + "latform.v1beta1.PrivateServiceConnectConfigB\003\340A\001\022 \n"
          + "\020endpoint_user_id\030\003 \001(\tB\006\340A\005\340A\001\032\247\002\n"
          + "\014DeployConfig\022U\n"
          + "\023dedicated_resources\030\001"
          + " \001(\01323.google.cloud.aiplatform.v1beta1.DedicatedResourcesB\003\340A\001\022 \n"
          + "\023fast_tryout_enabled\030\002 \001(\010B\003\340A\001\022i\n\r"
          + "system_labels\030\003 \003(\0132M.google.cloud.aiplatform.v1beta1.De"
          + "ployRequest.DeployConfig.SystemLabelsEntryB\003\340A\001\0323\n"
          + "\021SystemLabelsEntry\022\013\n"
          + "\003key\030\001 \001(\t\022\r\n"
          + "\005value\030\002 \001(\t:\0028\001B\013\n"
          + "\tartifacts\"\323\002\n"
          + "\033DeployPublisherModelRequest\022\022\n"
          + "\005model\030\001 \001(\tB\003\340A\002\022>\n"
          + "\013destination\030\002 \001(\tB)\340A\002\372A#\n"
          + "!locations.googleapis.com/Location\022\"\n"
          + "\025endpoint_display_name\030\003 \001(\tB\003\340A\001\022U\n"
          + "\023dedicated_resources\030\004"
          + " \001(\01323.google.cloud.aiplatform.v1beta1.DedicatedResourcesB\003\340A\001\022\037\n"
          + "\022model_display_name\030\005 \001(\tB\003\340A\001\022&\n"
          + "\031hugging_face_access_token\030\006 \001(\tB\003\340A\001\022\030\n"
          + "\013accept_eula\030\007 \001(\010B\003\340A\001:\002\030\001\"\321\001\n"
          + "\016DeployResponse\022I\n"
          + "\017publisher_model\030\001 \001(\tB0\340A\003\372A*\n"
          + "(aiplatform.googleapis.com/PublisherModel\022<\n"
          + "\010endpoint\030\002 \001(\tB*\340A\003\372A$\n"
          + "\"aiplatform.googleapis.com/Endpoint\0226\n"
          + "\005model\030\003 \001(\tB\'\340A\003\372A!\n"
          + "\037aiplatform.googleapis.com/Model\"\343\001\n"
          + "\034DeployPublisherModelResponse\022I\n"
          + "\017publisher_model\030\001 \001(\tB0\340A\003\372A*\n"
          + "(aiplatform.googleapis.com/PublisherModel\022<\n"
          + "\010endpoint\030\002 \001(\tB*\340A\003\372A$\n"
          + "\"aiplatform.googleapis.com/Endpoint\0226\n"
          + "\005model\030\003 \001(\tB\'\340A\003\372A!\n"
          + "\037aiplatform.googleapis.com/Model:\002\030\001\"\255\002\n"
          + "\027DeployOperationMetadata\022S\n"
          + "\020generic_metadata\030\001 \001(\01329.google"
          + ".cloud.aiplatform.v1beta1.GenericOperationMetadata\022I\n"
          + "\017publisher_model\030\002 \001(\tB0\340A\003\372A*\n"
          + "(aiplatform.googleapis.com/PublisherModel\022>\n"
          + "\013destination\030\003 \001(\tB)\340A\003\372A#\n"
          + "!locations.googleapis.com/Location\022\033\n"
          + "\016project_number\030\004 \001(\003B\003\340A\003\022\025\n"
          + "\010model_id\030\005 \001(\tB\003\340A\003\"\250\002\n"
          + "%DeployPublisherModelOperationMetadata\022S\n"
          + "\020generic_metadata\030\001 \001(\01329.google.c"
          + "loud.aiplatform.v1beta1.GenericOperationMetadata\022I\n"
          + "\017publisher_model\030\002 \001(\tB0\340A\003\372A*\n"
          + "(aiplatform.googleapis.com/PublisherModel\022>\n"
          + "\013destination\030\003 \001(\tB)\340A\003\372A#\n"
          + "!locations.googleapis.com/Location\022\033\n"
          + "\016project_number\030\004 \001(\003B\003\340A\003:\002\030\001\"P\n"
          + "\034ExportPublisherModelResponse\022\027\n"
          + "\017publisher_model\030\001 \001(\t\022\027\n"
          + "\017destination_uri\030\002 \001(\t\"|\n"
          + "%ExportPublisherModelOperationMetadata\022S\n"
          + "\020generic_metadata\030\001"
          + " \001(\01329.google.cloud.aiplatform.v1beta1.GenericOperationMetadata\"\266\001\n"
          + "\033ExportPublisherModelRequest\022\021\n"
          + "\004name\030\001 \001(\tB\003\340A\002\022I\n"
          + "\013destination\030\002"
          + " \001(\0132/.google.cloud.aiplatform.v1beta1.GcsDestinationB\003\340A\002\0229\n"
          + "\006parent\030\003 \001(\tB)\340A\002\372A#\n"
          + "!locations.googleapis.com/Location\"\272\001\n"
          + "(CheckPublisherModelEulaAcceptanceRequest\022C\n"
          + "\006parent\030\001 \001(\tB3\340A\002\372A-\n"
          + "+cloudresourcemanager.googleapis.com/Project\022I\n"
          + "\017publisher_model\030\002 \001(\tB0\340A\002\372A*\n"
          + "(aiplatform.googleapis.com/PublisherModel\"\261\001\n"
          + "\037AcceptPublisherModelEulaRequest\022C\n"
          + "\006parent\030\001 \001(\tB3\340A\002\372A-\n"
          + "+cloudresourcemanager.googleapis.com/Project\022I\n"
          + "\017publisher_model\030\002 \001(\tB0\340A\002\372A*\n"
          + "(aiplatform.googleapis.com/PublisherModel\"s\n"
          + "\034PublisherModelEulaAcceptance\022\026\n"
          + "\016project_number\030\001 \001(\003\022\027\n"
          + "\017publisher_model\030\002 \001(\t\022\"\n"
          + "\032publisher_model_eula_acked\030\003 \001(\010*\241\001\n"
          + "\022PublisherModelView\022$\n"
          + " PUBLISHER_MODEL_VIEW_UNSPECIFIED\020\000\022\036\n"
          + "\032PUBLISHER_MODEL_VIEW_BASIC\020\001\022\035\n"
          + "\031PUBLISHER_MODEL_VIEW_FULL\020\002\022&\n"
          + "\"PUBLISHER_MODEL_VERSION_VIEW_BASIC\020\0032\325\r\n"
          + "\022ModelGardenService\022\265\001\n"
          + "\021GetPublisherModel\0229.google.cloud.aiplatform.v1beta1.GetPublisherModel"
          + "Request\032/.google.cloud.aiplatform.v1beta"
          + "1.PublisherModel\"4\332A\004name\202\323\344\223\002\'\022%/v1beta1/{name=publishers/*/models/*}\022\310\001\n"
          + "\023ListPublisherModels\022;.google.cloud.aiplatform"
          + ".v1beta1.ListPublisherModelsRequest\032<.google.cloud.aiplatform.v1beta1.ListPublis"
          + "herModelsResponse\"6\332A\006parent\202\323\344\223\002\'\022%/v1beta1/{parent=publishers/*}/models\022\304\001\n"
          + "\006Deploy\022..google.cloud.aiplatform.v1beta1.D"
          + "eployRequest\032\035.google.longrunning.Operation\"k\312A)\n"
          + "\016DeployResponse\022\027DeployOperatio"
          + "nMetadata\202\323\344\223\0029\"4/v1beta1/{destination=projects/*/locations/*}:deploy:\001*\022\216\002\n"
          + "\024DeployPublisherModel\022<.google.cloud.aiplatf"
          + "orm.v1beta1.DeployPublisherModelRequest\032\035.google.longrunning.Operation\"\230\001\210\002\001\312AE\n"
          + "\034DeployPublisherModelResponse\022%DeployPub"
          + "lisherModelOperationMetadata\202\323\344\223\002G\"B/v1b"
          + "eta1/{destination=projects/*/locations/*}:deployPublisherModel:\001*\022\225\002\n"
          + "\024ExportPublisherModel\022<.google.cloud.aiplatform.v1b"
          + "eta1.ExportPublisherModelRequest\032\035.google.longrunning.Operation\"\237\001\312AE\n"
          + "\034ExportPublisherModelResponse\022%ExportPublisherMode"
          + "lOperationMetadata\202\323\344\223\002Q\"L/v1beta1/{pare"
          + "nt=projects/*/locations/*}/{name=publishers/*/models/*}:export:\001*\022\205\002\n"
          + "!CheckPublisherModelEulaAcceptance\022I.google.cloud.a"
          + "iplatform.v1beta1.CheckPublisherModelEulaAcceptanceRequest\032=.google.cloud.aiplat"
          + "form.v1beta1.PublisherModelEulaAcceptanc"
          + "e\"V\332A\026parent,publisher_model\202\323\344\223\0027\"2/v1b"
          + "eta1/{parent=projects/*}/modelGardenEula:check:\001*\022\364\001\n"
          + "\030AcceptPublisherModelEula\022@.google.cloud.aiplatform.v1beta1.AcceptP"
          + "ublisherModelEulaRequest\032=.google.cloud.aiplatform.v1beta1.PublisherModelEulaAcc"
          + "eptance\"W\332A\026parent,publisher_model\202\323\344\223\0028"
          + "\"3/v1beta1/{parent=projects/*}/modelGard"
          + "enEula:accept:\001*\032M\312A\031aiplatform.googleap"
          + "is.com\322A.https://www.googleapis.com/auth/cloud-platformB\356\001\n"
          + "#com.google.cloud.aiplatform.v1beta1B\027ModelGardenServiceProto"
          + "P\001ZCcloud.google.com/go/aiplatform/apiv1"
          + "beta1/aiplatformpb;aiplatformpb\252\002\037Google"
          + ".Cloud.AIPlatform.V1Beta1\312\002\037Google\\Cloud"
          + "\\AIPlatform\\V1beta1\352\002\"Google::Cloud::AIPlatform::V1beta1b\006proto3"
    };
    // Build and link the FileDescriptor against its dependency file descriptors.
    descriptor =
        com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
            descriptorData,
            new com.google.protobuf.Descriptors.FileDescriptor[] {
              com.google.api.AnnotationsProto.getDescriptor(),
              com.google.api.ClientProto.getDescriptor(),
              com.google.api.FieldBehaviorProto.getDescriptor(),
              com.google.api.ResourceProto.getDescriptor(),
              com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor(),
              com.google.cloud.aiplatform.v1beta1.MachineResourcesProto.getDescriptor(),
              com.google.cloud.aiplatform.v1beta1.ModelProto.getDescriptor(),
              com.google.cloud.aiplatform.v1beta1.OperationProto.getDescriptor(),
              com.google.cloud.aiplatform.v1beta1.PublisherModelProto.getDescriptor(),
              com.google.cloud.aiplatform.v1beta1.ServiceNetworkingProto.getDescriptor(),
              com.google.longrunning.OperationsProto.getDescriptor(),
            });
    // Per-message descriptors and reflective field-accessor tables, looked up by the
    // message's index in the file; the field-name arrays mirror the proto field order.
    internal_static_google_cloud_aiplatform_v1beta1_GetPublisherModelRequest_descriptor =
        getDescriptor().getMessageTypes().get(0);
    internal_static_google_cloud_aiplatform_v1beta1_GetPublisherModelRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_GetPublisherModelRequest_descriptor,
            new java.lang.String[] {
              "Name",
              "LanguageCode",
              "View",
              "IsHuggingFaceModel",
              "HuggingFaceToken",
              "IncludeEquivalentModelGardenModelDeploymentConfigs",
            });
    internal_static_google_cloud_aiplatform_v1beta1_ListPublisherModelsRequest_descriptor =
        getDescriptor().getMessageTypes().get(1);
    internal_static_google_cloud_aiplatform_v1beta1_ListPublisherModelsRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_ListPublisherModelsRequest_descriptor,
            new java.lang.String[] {
              "Parent",
              "Filter",
              "PageSize",
              "PageToken",
              "View",
              "OrderBy",
              "LanguageCode",
              "ListAllVersions",
            });
    internal_static_google_cloud_aiplatform_v1beta1_ListPublisherModelsResponse_descriptor =
        getDescriptor().getMessageTypes().get(2);
    internal_static_google_cloud_aiplatform_v1beta1_ListPublisherModelsResponse_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_ListPublisherModelsResponse_descriptor,
            new java.lang.String[] {
              "PublisherModels", "NextPageToken",
            });
    internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_descriptor =
        getDescriptor().getMessageTypes().get(3);
    internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_descriptor,
            new java.lang.String[] {
              "PublisherModelName",
              "HuggingFaceModelId",
              "CustomModel",
              "Destination",
              "ModelConfig",
              "EndpointConfig",
              "DeployConfig",
              "Artifacts",
            });
    // Nested message types are resolved through their parent descriptor, not the file.
    internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_CustomModel_descriptor =
        internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_descriptor
            .getNestedTypes()
            .get(0);
    internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_CustomModel_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_CustomModel_descriptor,
            new java.lang.String[] {
              "GcsUri", "ModelSource",
            });
    internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_ModelConfig_descriptor =
        internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_descriptor
            .getNestedTypes()
            .get(1);
    internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_ModelConfig_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_ModelConfig_descriptor,
            new java.lang.String[] {
              "AcceptEula",
              "HuggingFaceAccessToken",
              "HuggingFaceCacheEnabled",
              "ModelDisplayName",
              "ContainerSpec",
              "ModelUserId",
            });
    internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_EndpointConfig_descriptor =
        internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_descriptor
            .getNestedTypes()
            .get(2);
    internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_EndpointConfig_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_EndpointConfig_descriptor,
            new java.lang.String[] {
              "EndpointDisplayName",
              "DedicatedEndpointEnabled",
              "DedicatedEndpointDisabled",
              "PrivateServiceConnectConfig",
              "EndpointUserId",
            });
    internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_DeployConfig_descriptor =
        internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_descriptor
            .getNestedTypes()
            .get(3);
    internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_DeployConfig_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_DeployConfig_descriptor,
            new java.lang.String[] {
              "DedicatedResources", "FastTryoutEnabled", "SystemLabels",
            });
    internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_DeployConfig_SystemLabelsEntry_descriptor =
        internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_DeployConfig_descriptor
            .getNestedTypes()
            .get(0);
    internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_DeployConfig_SystemLabelsEntry_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_DeployRequest_DeployConfig_SystemLabelsEntry_descriptor,
            new java.lang.String[] {
              "Key", "Value",
            });
    internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelRequest_descriptor =
        getDescriptor().getMessageTypes().get(4);
    internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelRequest_descriptor,
            new java.lang.String[] {
              "Model",
              "Destination",
              "EndpointDisplayName",
              "DedicatedResources",
              "ModelDisplayName",
              "HuggingFaceAccessToken",
              "AcceptEula",
            });
    internal_static_google_cloud_aiplatform_v1beta1_DeployResponse_descriptor =
        getDescriptor().getMessageTypes().get(5);
    internal_static_google_cloud_aiplatform_v1beta1_DeployResponse_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_DeployResponse_descriptor,
            new java.lang.String[] {
              "PublisherModel", "Endpoint", "Model",
            });
    internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelResponse_descriptor =
        getDescriptor().getMessageTypes().get(6);
    internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelResponse_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelResponse_descriptor,
            new java.lang.String[] {
              "PublisherModel", "Endpoint", "Model",
            });
    internal_static_google_cloud_aiplatform_v1beta1_DeployOperationMetadata_descriptor =
        getDescriptor().getMessageTypes().get(7);
    internal_static_google_cloud_aiplatform_v1beta1_DeployOperationMetadata_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_DeployOperationMetadata_descriptor,
            new java.lang.String[] {
              "GenericMetadata", "PublisherModel", "Destination", "ProjectNumber", "ModelId",
            });
    internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelOperationMetadata_descriptor =
        getDescriptor().getMessageTypes().get(8);
    internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelOperationMetadata_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelOperationMetadata_descriptor,
            new java.lang.String[] {
              "GenericMetadata", "PublisherModel", "Destination", "ProjectNumber",
            });
    internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelResponse_descriptor =
        getDescriptor().getMessageTypes().get(9);
    internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelResponse_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelResponse_descriptor,
            new java.lang.String[] {
              "PublisherModel", "DestinationUri",
            });
    internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelOperationMetadata_descriptor =
        getDescriptor().getMessageTypes().get(10);
    internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelOperationMetadata_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelOperationMetadata_descriptor,
            new java.lang.String[] {
              "GenericMetadata",
            });
    internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelRequest_descriptor =
        getDescriptor().getMessageTypes().get(11);
    internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_ExportPublisherModelRequest_descriptor,
            new java.lang.String[] {
              "Name", "Destination", "Parent",
            });
    internal_static_google_cloud_aiplatform_v1beta1_CheckPublisherModelEulaAcceptanceRequest_descriptor =
        getDescriptor().getMessageTypes().get(12);
    internal_static_google_cloud_aiplatform_v1beta1_CheckPublisherModelEulaAcceptanceRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_CheckPublisherModelEulaAcceptanceRequest_descriptor,
            new java.lang.String[] {
              "Parent", "PublisherModel",
            });
    internal_static_google_cloud_aiplatform_v1beta1_AcceptPublisherModelEulaRequest_descriptor =
        getDescriptor().getMessageTypes().get(13);
    internal_static_google_cloud_aiplatform_v1beta1_AcceptPublisherModelEulaRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_AcceptPublisherModelEulaRequest_descriptor,
            new java.lang.String[] {
              "Parent", "PublisherModel",
            });
    internal_static_google_cloud_aiplatform_v1beta1_PublisherModelEulaAcceptance_descriptor =
        getDescriptor().getMessageTypes().get(14);
    internal_static_google_cloud_aiplatform_v1beta1_PublisherModelEulaAcceptance_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_google_cloud_aiplatform_v1beta1_PublisherModelEulaAcceptance_descriptor,
            new java.lang.String[] {
              "ProjectNumber", "PublisherModel", "PublisherModelEulaAcked",
            });
    // Register custom-option extensions (HTTP bindings, field behavior, resource
    // references, operation info) so internalUpdateFileDescriptor can re-parse the
    // options embedded in the serialized descriptor.
    com.google.protobuf.ExtensionRegistry registry =
        com.google.protobuf.ExtensionRegistry.newInstance();
    registry.add(com.google.api.ClientProto.defaultHost);
    registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
    registry.add(com.google.api.AnnotationsProto.http);
    registry.add(com.google.api.ClientProto.methodSignature);
    registry.add(com.google.api.ClientProto.oauthScopes);
    registry.add(com.google.api.ResourceProto.resourceReference);
    registry.add(com.google.longrunning.OperationsProto.operationInfo);
    com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
        descriptor, registry);
    // Touch each dependency so its descriptor class is loaded and initialized;
    // the return values are intentionally discarded.
    com.google.api.AnnotationsProto.getDescriptor();
    com.google.api.ClientProto.getDescriptor();
    com.google.api.FieldBehaviorProto.getDescriptor();
    com.google.api.ResourceProto.getDescriptor();
    com.google.cloud.aiplatform.v1beta1.IoProto.getDescriptor();
    com.google.cloud.aiplatform.v1beta1.MachineResourcesProto.getDescriptor();
    com.google.cloud.aiplatform.v1beta1.ModelProto.getDescriptor();
    com.google.cloud.aiplatform.v1beta1.OperationProto.getDescriptor();
    com.google.cloud.aiplatform.v1beta1.PublisherModelProto.getDescriptor();
    com.google.cloud.aiplatform.v1beta1.ServiceNetworkingProto.getDescriptor();
    com.google.longrunning.OperationsProto.getDescriptor();
  }
// @@protoc_insertion_point(outer_class_scope)
}
|
googleapis/google-cloud-java | 36,090 | java-notebooks/proto-google-cloud-notebooks-v1/src/main/java/com/google/cloud/notebooks/v1/RuntimeAccessConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/notebooks/v1/runtime.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.notebooks.v1;
/**
*
*
* <pre>
* Specifies the login configuration for Runtime
* </pre>
*
* Protobuf type {@code google.cloud.notebooks.v1.RuntimeAccessConfig}
*/
public final class RuntimeAccessConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.notebooks.v1.RuntimeAccessConfig)
RuntimeAccessConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use RuntimeAccessConfig.newBuilder() to construct.
  // Builder-based constructor; external code constructs instances via newBuilder().
  private RuntimeAccessConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private RuntimeAccessConfig() {
    // Proto3 defaults: access_type = 0 (RUNTIME_ACCESS_TYPE_UNSPECIFIED), empty strings.
    accessType_ = 0;
    runtimeOwner_ = "";
    proxyUri_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Returns a fresh default instance; the parameter itself is never read.
    return new RuntimeAccessConfig();
  }
  /** Returns the message descriptor for {@code google.cloud.notebooks.v1.RuntimeAccessConfig}. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.notebooks.v1.RuntimeProto
        .internal_static_google_cloud_notebooks_v1_RuntimeAccessConfig_descriptor;
  }
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.notebooks.v1.RuntimeProto
.internal_static_google_cloud_notebooks_v1_RuntimeAccessConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.notebooks.v1.RuntimeAccessConfig.class,
com.google.cloud.notebooks.v1.RuntimeAccessConfig.Builder.class);
}
/**
*
*
* <pre>
* Possible ways to access runtime. Authentication mode.
* Currently supports: Single User only.
* </pre>
*
* Protobuf enum {@code google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType}
*/
  public enum RuntimeAccessType implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Unspecified access.
     * </pre>
     *
     * <code>RUNTIME_ACCESS_TYPE_UNSPECIFIED = 0;</code>
     */
    RUNTIME_ACCESS_TYPE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Single user login.
     * </pre>
     *
     * <code>SINGLE_USER = 1;</code>
     */
    SINGLE_USER(1),
    /**
     *
     *
     * <pre>
     * Service Account mode.
     * In Service Account mode, Runtime creator will specify a SA that exists
     * in the consumer project. Using Runtime Service Account field.
     * Users accessing the Runtime need ActAs (Service Account User) permission.
     * </pre>
     *
     * <code>SERVICE_ACCOUNT = 2;</code>
     */
    SERVICE_ACCOUNT(2),
    // Placeholder for wire values with no constant in this generated code; it has no
    // descriptor entry, so getNumber() and getValueDescriptor() throw for it.
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * Unspecified access.
     * </pre>
     *
     * <code>RUNTIME_ACCESS_TYPE_UNSPECIFIED = 0;</code>
     */
    public static final int RUNTIME_ACCESS_TYPE_UNSPECIFIED_VALUE = 0;

    /**
     *
     *
     * <pre>
     * Single user login.
     * </pre>
     *
     * <code>SINGLE_USER = 1;</code>
     */
    public static final int SINGLE_USER_VALUE = 1;

    /**
     *
     *
     * <pre>
     * Service Account mode.
     * In Service Account mode, Runtime creator will specify a SA that exists
     * in the consumer project. Using Runtime Service Account field.
     * Users accessing the Runtime need ActAs (Service Account User) permission.
     * </pre>
     *
     * <code>SERVICE_ACCOUNT = 2;</code>
     */
    public static final int SERVICE_ACCOUNT_VALUE = 2;

    // Returns the proto wire number; UNRECOGNIZED carries no number and rejects the call.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static RuntimeAccessType valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or {@code null} if there is
     *     no constant with that number.
     */
    public static RuntimeAccessType forNumber(int value) {
      switch (value) {
        case 0:
          return RUNTIME_ACCESS_TYPE_UNSPECIFIED;
        case 1:
          return SINGLE_USER;
        case 2:
          return SERVICE_ACCOUNT;
        default:
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<RuntimeAccessType>
        internalGetValueMap() {
      return internalValueMap;
    }

    // Number-to-constant lookup used by the protobuf runtime; delegates to forNumber.
    private static final com.google.protobuf.Internal.EnumLiteMap<RuntimeAccessType>
        internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<RuntimeAccessType>() {
              public RuntimeAccessType findValueByNumber(int number) {
                return RuntimeAccessType.forNumber(number);
              }
            };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      // ordinal() is a valid descriptor index here because the constants are declared
      // in the same order as the proto enum values.
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.notebooks.v1.RuntimeAccessConfig.getDescriptor()
          .getEnumTypes()
          .get(0);
    }

    // Cached snapshot of values(); valueOf(EnumValueDescriptor) indexes into it by
    // the descriptor index.
    private static final RuntimeAccessType[] VALUES = values();

    public static RuntimeAccessType valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    // Proto wire number of this constant (-1 for UNRECOGNIZED).
    private final int value;

    private RuntimeAccessType(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType)
  }
public static final int ACCESS_TYPE_FIELD_NUMBER = 1;
// Stored as the raw wire integer so enum values unknown to this build survive round-trips.
private int accessType_ = 0;
/**
 *
 *
 * <pre>
 * The type of access mode this instance.
 * </pre>
 *
 * <code>.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType access_type = 1;</code>
 *
 * @return The enum numeric value on the wire for accessType.
 */
@java.lang.Override
public int getAccessTypeValue() {
return accessType_;
}
/**
 *
 *
 * <pre>
 * The type of access mode this instance.
 * </pre>
 *
 * <code>.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType access_type = 1;</code>
 *
 * @return The accessType.
 */
@java.lang.Override
public com.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType getAccessType() {
com.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType result =
com.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType.forNumber(accessType_);
// forNumber returns null for wire values this build does not know; surface UNRECOGNIZED instead.
return result == null
? com.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType.UNRECOGNIZED
: result;
}
public static final int RUNTIME_OWNER_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
// Holds either a String or a ByteString; accessors lazily convert and cache the other form
// (volatile so the cached conversion publishes safely across threads).
private volatile java.lang.Object runtimeOwner_ = "";
/**
 *
 *
 * <pre>
 * The owner of this runtime after creation. Format: `alias&#64;example.com`
 * Currently supports one owner only.
 * </pre>
 *
 * <code>string runtime_owner = 2;</code>
 *
 * @return The runtimeOwner.
 */
@java.lang.Override
public java.lang.String getRuntimeOwner() {
java.lang.Object ref = runtimeOwner_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 conversion.
runtimeOwner_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * The owner of this runtime after creation. Format: `alias&#64;example.com`
 * Currently supports one owner only.
 * </pre>
 *
 * <code>string runtime_owner = 2;</code>
 *
 * @return The bytes for runtimeOwner.
 */
@java.lang.Override
public com.google.protobuf.ByteString getRuntimeOwnerBytes() {
java.lang.Object ref = runtimeOwner_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// Cache the encoded ByteString so subsequent calls skip the UTF-8 conversion.
runtimeOwner_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PROXY_URI_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
// Same lazy String/ByteString caching scheme as runtimeOwner_.
private volatile java.lang.Object proxyUri_ = "";
/**
 *
 *
 * <pre>
 * Output only. The proxy endpoint that is used to access the runtime.
 * </pre>
 *
 * <code>string proxy_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The proxyUri.
 */
@java.lang.Override
public java.lang.String getProxyUri() {
java.lang.Object ref = proxyUri_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
proxyUri_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * Output only. The proxy endpoint that is used to access the runtime.
 * </pre>
 *
 * <code>string proxy_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The bytes for proxyUri.
 */
@java.lang.Override
public com.google.protobuf.ByteString getProxyUriBytes() {
java.lang.Object ref = proxyUri_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
proxyUri_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Fields at their default values (zero enum, empty string) are not written.
if (accessType_
!= com.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType
.RUNTIME_ACCESS_TYPE_UNSPECIFIED
.getNumber()) {
output.writeEnum(1, accessType_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(runtimeOwner_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, runtimeOwner_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(proxyUri_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, proxyUri_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize of -1 means "not yet computed"; the result is cached after the first call.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (accessType_
!= com.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType
.RUNTIME_ACCESS_TYPE_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, accessType_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(runtimeOwner_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, runtimeOwner_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(proxyUri_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, proxyUri_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
/**
 * Field-by-field equality: access_type, runtime_owner, proxy_uri, and any unknown fields
 * must all match.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.notebooks.v1.RuntimeAccessConfig)) {
    return super.equals(obj);
  }
  com.google.cloud.notebooks.v1.RuntimeAccessConfig that =
      (com.google.cloud.notebooks.v1.RuntimeAccessConfig) obj;
  return accessType_ == that.accessType_
      && getRuntimeOwner().equals(that.getRuntimeOwner())
      && getProxyUri().equals(that.getProxyUri())
      && getUnknownFields().equals(that.getUnknownFields());
}
@java.lang.Override
public int hashCode() {
// Memoized; 0 means "not yet computed". Must stay consistent with equals().
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + ACCESS_TYPE_FIELD_NUMBER;
hash = (53 * hash) + accessType_;
hash = (37 * hash) + RUNTIME_OWNER_FIELD_NUMBER;
hash = (53 * hash) + getRuntimeOwner().hashCode();
hash = (37 * hash) + PROXY_URI_FIELD_NUMBER;
hash = (53 * hash) + getProxyUri().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points; all delegate to the shared PARSER instance
// (stream variants go through GeneratedMessageV3 helpers to translate IO errors).
public static com.google.cloud.notebooks.v1.RuntimeAccessConfig parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.notebooks.v1.RuntimeAccessConfig parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.RuntimeAccessConfig parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.notebooks.v1.RuntimeAccessConfig parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.RuntimeAccessConfig parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.notebooks.v1.RuntimeAccessConfig parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.RuntimeAccessConfig parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.notebooks.v1.RuntimeAccessConfig parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.RuntimeAccessConfig parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.notebooks.v1.RuntimeAccessConfig parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.RuntimeAccessConfig parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.notebooks.v1.RuntimeAccessConfig parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Builders are obtained from the default instance so field defaults are shared.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.notebooks.v1.RuntimeAccessConfig prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields a fresh empty Builder; anything else is copied via mergeFrom.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * Specifies the login configuration for Runtime
 * </pre>
 *
 * Protobuf type {@code google.cloud.notebooks.v1.RuntimeAccessConfig}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.notebooks.v1.RuntimeAccessConfig)
com.google.cloud.notebooks.v1.RuntimeAccessConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.notebooks.v1.RuntimeProto
.internal_static_google_cloud_notebooks_v1_RuntimeAccessConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.notebooks.v1.RuntimeProto
.internal_static_google_cloud_notebooks_v1_RuntimeAccessConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.notebooks.v1.RuntimeAccessConfig.class,
com.google.cloud.notebooks.v1.RuntimeAccessConfig.Builder.class);
}
// Construct using com.google.cloud.notebooks.v1.RuntimeAccessConfig.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
// Reset all fields to their defaults and clear the has-bits.
super.clear();
bitField0_ = 0;
accessType_ = 0;
runtimeOwner_ = "";
proxyUri_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.notebooks.v1.RuntimeProto
.internal_static_google_cloud_notebooks_v1_RuntimeAccessConfig_descriptor;
}
@java.lang.Override
public com.google.cloud.notebooks.v1.RuntimeAccessConfig getDefaultInstanceForType() {
return com.google.cloud.notebooks.v1.RuntimeAccessConfig.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.notebooks.v1.RuntimeAccessConfig build() {
com.google.cloud.notebooks.v1.RuntimeAccessConfig result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.notebooks.v1.RuntimeAccessConfig buildPartial() {
com.google.cloud.notebooks.v1.RuntimeAccessConfig result =
new com.google.cloud.notebooks.v1.RuntimeAccessConfig(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies only the fields whose has-bits are set into the freshly built message.
private void buildPartial0(com.google.cloud.notebooks.v1.RuntimeAccessConfig result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.accessType_ = accessType_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.runtimeOwner_ = runtimeOwner_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.proxyUri_ = proxyUri_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
// Fast path for same-type merges; otherwise fall back to reflective merging.
if (other instanceof com.google.cloud.notebooks.v1.RuntimeAccessConfig) {
return mergeFrom((com.google.cloud.notebooks.v1.RuntimeAccessConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.notebooks.v1.RuntimeAccessConfig other) {
if (other == com.google.cloud.notebooks.v1.RuntimeAccessConfig.getDefaultInstance())
return this;
// Only non-default fields from 'other' overwrite this builder's state.
if (other.accessType_ != 0) {
setAccessTypeValue(other.getAccessTypeValue());
}
if (!other.getRuntimeOwner().isEmpty()) {
runtimeOwner_ = other.runtimeOwner_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getProxyUri().isEmpty()) {
proxyUri_ = other.proxyUri_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
// field 1 (access_type), varint.
accessType_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18:
{
// field 2 (runtime_owner), length-delimited UTF-8.
runtimeOwner_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
// field 3 (proxy_uri), length-delimited UTF-8.
proxyUri_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Bit 0: access_type, bit 1: runtime_owner, bit 2: proxy_uri — tracks explicitly set fields.
private int bitField0_;
private int accessType_ = 0;
/**
 *
 *
 * <pre>
 * The type of access mode this instance.
 * </pre>
 *
 * <code>.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType access_type = 1;
 * </code>
 *
 * @return The enum numeric value on the wire for accessType.
 */
@java.lang.Override
public int getAccessTypeValue() {
return accessType_;
}
/**
 *
 *
 * <pre>
 * The type of access mode this instance.
 * </pre>
 *
 * <code>.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType access_type = 1;
 * </code>
 *
 * @param value The enum numeric value on the wire for accessType to set.
 * @return This builder for chaining.
 */
public Builder setAccessTypeValue(int value) {
accessType_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The type of access mode this instance.
 * </pre>
 *
 * <code>.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType access_type = 1;
 * </code>
 *
 * @return The accessType.
 */
@java.lang.Override
public com.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType getAccessType() {
com.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType result =
com.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType.forNumber(
accessType_);
// Unknown wire values are surfaced as UNRECOGNIZED rather than null.
return result == null
? com.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType.UNRECOGNIZED
: result;
}
/**
 *
 *
 * <pre>
 * The type of access mode this instance.
 * </pre>
 *
 * <code>.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType access_type = 1;
 * </code>
 *
 * @param value The accessType to set.
 * @return This builder for chaining.
 */
public Builder setAccessType(
com.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
accessType_ = value.getNumber();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The type of access mode this instance.
 * </pre>
 *
 * <code>.google.cloud.notebooks.v1.RuntimeAccessConfig.RuntimeAccessType access_type = 1;
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearAccessType() {
bitField0_ = (bitField0_ & ~0x00000001);
accessType_ = 0;
onChanged();
return this;
}
// Same lazy String/ByteString representation as the message field.
private java.lang.Object runtimeOwner_ = "";
/**
 *
 *
 * <pre>
 * The owner of this runtime after creation. Format: `alias&#64;example.com`
 * Currently supports one owner only.
 * </pre>
 *
 * <code>string runtime_owner = 2;</code>
 *
 * @return The runtimeOwner.
 */
public java.lang.String getRuntimeOwner() {
java.lang.Object ref = runtimeOwner_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
runtimeOwner_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * The owner of this runtime after creation. Format: `alias&#64;example.com`
 * Currently supports one owner only.
 * </pre>
 *
 * <code>string runtime_owner = 2;</code>
 *
 * @return The bytes for runtimeOwner.
 */
public com.google.protobuf.ByteString getRuntimeOwnerBytes() {
java.lang.Object ref = runtimeOwner_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
runtimeOwner_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * The owner of this runtime after creation. Format: `alias&#64;example.com`
 * Currently supports one owner only.
 * </pre>
 *
 * <code>string runtime_owner = 2;</code>
 *
 * @param value The runtimeOwner to set.
 * @return This builder for chaining.
 */
public Builder setRuntimeOwner(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
runtimeOwner_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The owner of this runtime after creation. Format: `alias&#64;example.com`
 * Currently supports one owner only.
 * </pre>
 *
 * <code>string runtime_owner = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearRuntimeOwner() {
runtimeOwner_ = getDefaultInstance().getRuntimeOwner();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The owner of this runtime after creation. Format: `alias&#64;example.com`
 * Currently supports one owner only.
 * </pre>
 *
 * <code>string runtime_owner = 2;</code>
 *
 * @param value The bytes for runtimeOwner to set.
 * @return This builder for chaining.
 */
public Builder setRuntimeOwnerBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
runtimeOwner_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object proxyUri_ = "";
/**
 *
 *
 * <pre>
 * Output only. The proxy endpoint that is used to access the runtime.
 * </pre>
 *
 * <code>string proxy_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The proxyUri.
 */
public java.lang.String getProxyUri() {
java.lang.Object ref = proxyUri_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
proxyUri_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Output only. The proxy endpoint that is used to access the runtime.
 * </pre>
 *
 * <code>string proxy_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The bytes for proxyUri.
 */
public com.google.protobuf.ByteString getProxyUriBytes() {
java.lang.Object ref = proxyUri_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
proxyUri_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Output only. The proxy endpoint that is used to access the runtime.
 * </pre>
 *
 * <code>string proxy_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @param value The proxyUri to set.
 * @return This builder for chaining.
 */
public Builder setProxyUri(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
proxyUri_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. The proxy endpoint that is used to access the runtime.
 * </pre>
 *
 * <code>string proxy_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearProxyUri() {
proxyUri_ = getDefaultInstance().getProxyUri();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. The proxy endpoint that is used to access the runtime.
 * </pre>
 *
 * <code>string proxy_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @param value The bytes for proxyUri to set.
 * @return This builder for chaining.
 */
public Builder setProxyUriBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
proxyUri_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.notebooks.v1.RuntimeAccessConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.notebooks.v1.RuntimeAccessConfig)
// Singleton default instance shared by all callers; also the source of new builders.
private static final com.google.cloud.notebooks.v1.RuntimeAccessConfig DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.notebooks.v1.RuntimeAccessConfig();
}
public static com.google.cloud.notebooks.v1.RuntimeAccessConfig getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser that drives all parseFrom overloads; attaches the partial message to parse errors.
private static final com.google.protobuf.Parser<RuntimeAccessConfig> PARSER =
new com.google.protobuf.AbstractParser<RuntimeAccessConfig>() {
@java.lang.Override
public RuntimeAccessConfig parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<RuntimeAccessConfig> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<RuntimeAccessConfig> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.notebooks.v1.RuntimeAccessConfig getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== Next file: java-recommender/proto-google-cloud-recommender-v1/src/main/java/com/google/cloud/recommender/v1/ListInsightsResponse.java
// ==== (repo: googleapis/google-cloud-java, size: 36,114 bytes) ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recommender/v1/recommender_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.recommender.v1;
/**
*
*
* <pre>
* Response to the `ListInsights` method.
* </pre>
*
* Protobuf type {@code google.cloud.recommender.v1.ListInsightsResponse}
*/
public final class ListInsightsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.recommender.v1.ListInsightsResponse)
ListInsightsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListInsightsResponse.newBuilder() to construct.
private ListInsightsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: repeated field starts empty, string field starts "".
private ListInsightsResponse() {
insights_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListInsightsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.recommender.v1.RecommenderProto
.internal_static_google_cloud_recommender_v1_ListInsightsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.recommender.v1.RecommenderProto
.internal_static_google_cloud_recommender_v1_ListInsightsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.recommender.v1.ListInsightsResponse.class,
com.google.cloud.recommender.v1.ListInsightsResponse.Builder.class);
}
public static final int INSIGHTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
// Backing list for the repeated 'insights' field; exposed directly by the list accessors.
private java.util.List<com.google.cloud.recommender.v1.Insight> insights_;
/**
 *
 *
 * <pre>
 * The set of insights for the `parent` resource.
 * </pre>
 *
 * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.recommender.v1.Insight> getInsightsList() {
return insights_;
}
/**
 *
 *
 * <pre>
 * The set of insights for the `parent` resource.
 * </pre>
 *
 * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.recommender.v1.InsightOrBuilder>
getInsightsOrBuilderList() {
return insights_;
}
/**
 *
 *
 * <pre>
 * The set of insights for the `parent` resource.
 * </pre>
 *
 * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
 */
@java.lang.Override
public int getInsightsCount() {
return insights_.size();
}
/**
 *
 *
 * <pre>
 * The set of insights for the `parent` resource.
 * </pre>
 *
 * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
 */
@java.lang.Override
public com.google.cloud.recommender.v1.Insight getInsights(int index) {
return insights_.get(index);
}
/**
 *
 *
 * <pre>
 * The set of insights for the `parent` resource.
 * </pre>
 *
 * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
 */
@java.lang.Override
public com.google.cloud.recommender.v1.InsightOrBuilder getInsightsOrBuilder(int index) {
return insights_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
// Holds either a String or a ByteString; accessors lazily convert and cache the other form.
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * A token that can be used to request the next page of results. This field is
 * empty if there are no additional results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 conversion.
nextPageToken_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * A token that can be used to request the next page of results. This field is
 * empty if there are no additional results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < insights_.size(); i++) {
output.writeMessage(1, insights_.get(i));
}
// Empty string is the default and is not written.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize of -1 means "not yet computed"; the result is cached after the first call.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < insights_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, insights_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
  // Structural equality over all declared fields plus preserved unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.recommender.v1.ListInsightsResponse)) {
      // Defer to the superclass for non-message / foreign-type comparisons.
      return super.equals(obj);
    }
    com.google.cloud.recommender.v1.ListInsightsResponse other =
        (com.google.cloud.recommender.v1.ListInsightsResponse) obj;
    if (!getInsightsList().equals(other.getInsightsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals(); mixes field numbers and field values using the
  // protobuf-generated 37/53 scheme and memoizes the result (0 means "not computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getInsightsCount() > 0) {
      hash = (37 * hash) + INSIGHTS_FIELD_NUMBER;
      hash = (53 * hash) + getInsightsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
  // without an ExtensionRegistryLite. Buffer-based overloads throw
  // InvalidProtocolBufferException on malformed input; stream-based overloads may
  // also throw IOException from the underlying stream.
  public static com.google.cloud.recommender.v1.ListInsightsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.recommender.v1.ListInsightsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.recommender.v1.ListInsightsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.recommender.v1.ListInsightsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.recommender.v1.ListInsightsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.recommender.v1.ListInsightsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.recommender.v1.ListInsightsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.recommender.v1.ListInsightsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message payload,
  // allowing multiple messages to be framed on one stream.
  public static com.google.cloud.recommender.v1.ListInsightsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.recommender.v1.ListInsightsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.recommender.v1.ListInsightsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.recommender.v1.ListInsightsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: fresh builders come from the default instance's
  // toBuilder(); toBuilder() on the default instance avoids a redundant mergeFrom.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.recommender.v1.ListInsightsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response to the `ListInsights` method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.recommender.v1.ListInsightsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.recommender.v1.ListInsightsResponse)
      com.google.cloud.recommender.v1.ListInsightsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.recommender.v1.RecommenderProto
          .internal_static_google_cloud_recommender_v1_ListInsightsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.recommender.v1.RecommenderProto
          .internal_static_google_cloud_recommender_v1_ListInsightsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.recommender.v1.ListInsightsResponse.class,
              com.google.cloud.recommender.v1.ListInsightsResponse.Builder.class);
    }

    // Construct using com.google.cloud.recommender.v1.ListInsightsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its default and clears the presence bits in bitField0_.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (insightsBuilder_ == null) {
        insights_ = java.util.Collections.emptyList();
      } else {
        insights_ = null;
        insightsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.recommender.v1.RecommenderProto
          .internal_static_google_cloud_recommender_v1_ListInsightsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.recommender.v1.ListInsightsResponse getDefaultInstanceForType() {
      return com.google.cloud.recommender.v1.ListInsightsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.recommender.v1.ListInsightsResponse build() {
      com.google.cloud.recommender.v1.ListInsightsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.recommender.v1.ListInsightsResponse buildPartial() {
      com.google.cloud.recommender.v1.ListInsightsResponse result =
          new com.google.cloud.recommender.v1.ListInsightsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated `insights` field into the result. When the builder holds
    // the list directly (no field builder), the list is frozen as unmodifiable and
    // ownership moves to the message; bit 0x1 is cleared so this builder copies on
    // next mutation.
    private void buildPartialRepeatedFields(
        com.google.cloud.recommender.v1.ListInsightsResponse result) {
      if (insightsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          insights_ = java.util.Collections.unmodifiableList(insights_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.insights_ = insights_;
      } else {
        result.insights_ = insightsBuilder_.build();
      }
    }

    // Copies singular fields whose presence bit is set (0x2 = next_page_token).
    private void buildPartial0(com.google.cloud.recommender.v1.ListInsightsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.recommender.v1.ListInsightsResponse) {
        return mergeFrom((com.google.cloud.recommender.v1.ListInsightsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.recommender.v1.ListInsightsResponse other) {
      if (other == com.google.cloud.recommender.v1.ListInsightsResponse.getDefaultInstance())
        return this;
      if (insightsBuilder_ == null) {
        // List mode: adopt the other message's (immutable) list when ours is empty,
        // otherwise copy-on-write and append.
        if (!other.insights_.isEmpty()) {
          if (insights_.isEmpty()) {
            insights_ = other.insights_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureInsightsIsMutable();
            insights_.addAll(other.insights_);
          }
          onChanged();
        }
      } else {
        // Field-builder mode: if our builder is empty, discard it and adopt the list
        // (re-creating the builder only when alwaysUseFieldBuilders is set);
        // otherwise append through the builder.
        if (!other.insights_.isEmpty()) {
          if (insightsBuilder_.isEmpty()) {
            insightsBuilder_.dispose();
            insightsBuilder_ = null;
            insights_ = other.insights_;
            bitField0_ = (bitField0_ & ~0x00000001);
            insightsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getInsightsFieldBuilder()
                    : null;
          } else {
            insightsBuilder_.addAllMessages(other.insights_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse loop: dispatches on wire tags (10 = field 1 message,
    // 18 = field 2 string); unknown tags are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.recommender.v1.Insight m =
                    input.readMessage(
                        com.google.cloud.recommender.v1.Insight.parser(), extensionRegistry);
                if (insightsBuilder_ == null) {
                  ensureInsightsIsMutable();
                  insights_.add(m);
                } else {
                  insightsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit 0x1: insights_ list is privately owned/mutable; bit 0x2: next_page_token set.
    private int bitField0_;

    private java.util.List<com.google.cloud.recommender.v1.Insight> insights_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: clones the list before the first mutation after it was
    // shared or frozen.
    private void ensureInsightsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        insights_ = new java.util.ArrayList<com.google.cloud.recommender.v1.Insight>(insights_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily-created nested-builder support for `insights`; while null, the plain
    // insights_ list is authoritative.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.recommender.v1.Insight,
            com.google.cloud.recommender.v1.Insight.Builder,
            com.google.cloud.recommender.v1.InsightOrBuilder>
        insightsBuilder_;

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public java.util.List<com.google.cloud.recommender.v1.Insight> getInsightsList() {
      if (insightsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(insights_);
      } else {
        return insightsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public int getInsightsCount() {
      if (insightsBuilder_ == null) {
        return insights_.size();
      } else {
        return insightsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public com.google.cloud.recommender.v1.Insight getInsights(int index) {
      if (insightsBuilder_ == null) {
        return insights_.get(index);
      } else {
        return insightsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public Builder setInsights(int index, com.google.cloud.recommender.v1.Insight value) {
      if (insightsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureInsightsIsMutable();
        insights_.set(index, value);
        onChanged();
      } else {
        insightsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public Builder setInsights(
        int index, com.google.cloud.recommender.v1.Insight.Builder builderForValue) {
      if (insightsBuilder_ == null) {
        ensureInsightsIsMutable();
        insights_.set(index, builderForValue.build());
        onChanged();
      } else {
        insightsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public Builder addInsights(com.google.cloud.recommender.v1.Insight value) {
      if (insightsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureInsightsIsMutable();
        insights_.add(value);
        onChanged();
      } else {
        insightsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public Builder addInsights(int index, com.google.cloud.recommender.v1.Insight value) {
      if (insightsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureInsightsIsMutable();
        insights_.add(index, value);
        onChanged();
      } else {
        insightsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public Builder addInsights(com.google.cloud.recommender.v1.Insight.Builder builderForValue) {
      if (insightsBuilder_ == null) {
        ensureInsightsIsMutable();
        insights_.add(builderForValue.build());
        onChanged();
      } else {
        insightsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public Builder addInsights(
        int index, com.google.cloud.recommender.v1.Insight.Builder builderForValue) {
      if (insightsBuilder_ == null) {
        ensureInsightsIsMutable();
        insights_.add(index, builderForValue.build());
        onChanged();
      } else {
        insightsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public Builder addAllInsights(
        java.lang.Iterable<? extends com.google.cloud.recommender.v1.Insight> values) {
      if (insightsBuilder_ == null) {
        ensureInsightsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, insights_);
        onChanged();
      } else {
        insightsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public Builder clearInsights() {
      if (insightsBuilder_ == null) {
        insights_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        insightsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public Builder removeInsights(int index) {
      if (insightsBuilder_ == null) {
        ensureInsightsIsMutable();
        insights_.remove(index);
        onChanged();
      } else {
        insightsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public com.google.cloud.recommender.v1.Insight.Builder getInsightsBuilder(int index) {
      return getInsightsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public com.google.cloud.recommender.v1.InsightOrBuilder getInsightsOrBuilder(int index) {
      if (insightsBuilder_ == null) {
        return insights_.get(index);
      } else {
        return insightsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.recommender.v1.InsightOrBuilder>
        getInsightsOrBuilderList() {
      if (insightsBuilder_ != null) {
        return insightsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(insights_);
      }
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public com.google.cloud.recommender.v1.Insight.Builder addInsightsBuilder() {
      return getInsightsFieldBuilder()
          .addBuilder(com.google.cloud.recommender.v1.Insight.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public com.google.cloud.recommender.v1.Insight.Builder addInsightsBuilder(int index) {
      return getInsightsFieldBuilder()
          .addBuilder(index, com.google.cloud.recommender.v1.Insight.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The set of insights for the `parent` resource.
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1.Insight insights = 1;</code>
     */
    public java.util.List<com.google.cloud.recommender.v1.Insight.Builder>
        getInsightsBuilderList() {
      return getInsightsFieldBuilder().getBuilderList();
    }

    // Lazily creates the field builder, handing it the current list; afterwards
    // insights_ is nulled and all access goes through the builder.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.recommender.v1.Insight,
            com.google.cloud.recommender.v1.Insight.Builder,
            com.google.cloud.recommender.v1.InsightOrBuilder>
        getInsightsFieldBuilder() {
      if (insightsBuilder_ == null) {
        insightsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.recommender.v1.Insight,
                com.google.cloud.recommender.v1.Insight.Builder,
                com.google.cloud.recommender.v1.InsightOrBuilder>(
                insights_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        insights_ = null;
      }
      return insightsBuilder_;
    }

    // Dual representation: holds either a String or a lazily-decoded ByteString.
    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * A token that can be used to request the next page of results. This field is
     * empty if there are no additional results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token that can be used to request the next page of results. This field is
     * empty if there are no additional results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token that can be used to request the next page of results. This field is
     * empty if there are no additional results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token that can be used to request the next page of results. This field is
     * empty if there are no additional results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token that can be used to request the next page of results. This field is
     * empty if there are no additional results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.recommender.v1.ListInsightsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.recommender.v1.ListInsightsResponse)
  // Singleton default (all-fields-default) instance shared by the runtime.
  private static final com.google.cloud.recommender.v1.ListInsightsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.recommender.v1.ListInsightsResponse();
  }

  public static com.google.cloud.recommender.v1.ListInsightsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to the builder's tag-dispatch mergeFrom; parse errors carry the
  // partially-built message via setUnfinishedMessage for diagnostics.
  private static final com.google.protobuf.Parser<ListInsightsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListInsightsResponse>() {
        @java.lang.Override
        public ListInsightsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListInsightsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListInsightsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.recommender.v1.ListInsightsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---- File boundary (dataset concatenation): the following content is from
// googleapis/google-cloud-java,
// java-analytics-data/proto-google-analytics-data-v1alpha/src/main/java/com/google/analytics/data/v1alpha/SegmentEventFilter.java ----
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/data/v1alpha/data.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.data.v1alpha;
/**
*
*
* <pre>
* Creates a filter that matches events of a single event name. If a parameter
* filter expression is specified, only the subset of events that match both the
* single event name and the parameter filter expressions match this event
* filter.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1alpha.SegmentEventFilter}
*/
public final class SegmentEventFilter extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.data.v1alpha.SegmentEventFilter)
SegmentEventFilterOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use SegmentEventFilter.newBuilder() to construct.
  private SegmentEventFilter(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default constructor used only for the shared DEFAULT_INSTANCE.
  private SegmentEventFilter() {
    eventName_ = "";
  }

  // Runtime hook for creating fresh instances reflectively (e.g. during parsing).
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SegmentEventFilter();
  }
  // Message descriptor, resolved from the generated ReportingApiProto registry.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.data.v1alpha.ReportingApiProto
        .internal_static_google_analytics_data_v1alpha_SegmentEventFilter_descriptor;
  }
  // Reflection table mapping descriptor fields onto this class's accessors.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.data.v1alpha.ReportingApiProto
        .internal_static_google_analytics_data_v1alpha_SegmentEventFilter_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.data.v1alpha.SegmentEventFilter.class,
            com.google.analytics.data.v1alpha.SegmentEventFilter.Builder.class);
  }
  // Presence bits for `optional` fields: 0x1 = event_name, 0x2 = segment_parameter_filter_expression.
  private int bitField0_;
  public static final int EVENT_NAME_FIELD_NUMBER = 1;

  // Dual representation: holds either a String or a lazily-decoded ByteString.
  @SuppressWarnings("serial")
  private volatile java.lang.Object eventName_ = "";

  /**
   *
   *
   * <pre>
   * This filter matches events of this single event name. Event name is
   * required.
   * </pre>
   *
   * <code>optional string event_name = 1;</code>
   *
   * @return Whether the eventName field is set.
   */
  @java.lang.Override
  public boolean hasEventName() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * This filter matches events of this single event name. Event name is
   * required.
   * </pre>
   *
   * <code>optional string event_name = 1;</code>
   *
   * @return The eventName.
   */
  @java.lang.Override
  public java.lang.String getEventName() {
    java.lang.Object ref = eventName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the wire-format ByteString to a String and cache it.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      eventName_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * This filter matches events of this single event name. Event name is
   * required.
   * </pre>
   *
   * <code>optional string event_name = 1;</code>
   *
   * @return The bytes for eventName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getEventNameBytes() {
    java.lang.Object ref = eventName_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String as UTF-8 once and memoize the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      eventName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int SEGMENT_PARAMETER_FILTER_EXPRESSION_FIELD_NUMBER = 2;

  // null until set; accessors substitute the default instance for null.
  private com.google.analytics.data.v1alpha.SegmentParameterFilterExpression
      segmentParameterFilterExpression_;

  /**
   *
   *
   * <pre>
   * If specified, this filter matches events that match both the single event
   * name and the parameter filter expressions.
   *
   * Inside the parameter filter expression, only parameter filters are
   * available.
   * </pre>
   *
   * <code>
   * optional .google.analytics.data.v1alpha.SegmentParameterFilterExpression segment_parameter_filter_expression = 2;
   * </code>
   *
   * @return Whether the segmentParameterFilterExpression field is set.
   */
  @java.lang.Override
  public boolean hasSegmentParameterFilterExpression() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * If specified, this filter matches events that match both the single event
   * name and the parameter filter expressions.
   *
   * Inside the parameter filter expression, only parameter filters are
   * available.
   * </pre>
   *
   * <code>
   * optional .google.analytics.data.v1alpha.SegmentParameterFilterExpression segment_parameter_filter_expression = 2;
   * </code>
   *
   * @return The segmentParameterFilterExpression.
   */
  @java.lang.Override
  public com.google.analytics.data.v1alpha.SegmentParameterFilterExpression
      getSegmentParameterFilterExpression() {
    // Never returns null: an unset message field yields its default instance.
    return segmentParameterFilterExpression_ == null
        ? com.google.analytics.data.v1alpha.SegmentParameterFilterExpression.getDefaultInstance()
        : segmentParameterFilterExpression_;
  }
  /**
   *
   *
   * <pre>
   * If specified, this filter matches events that match both the single event
   * name and the parameter filter expressions.
   *
   * Inside the parameter filter expression, only parameter filters are
   * available.
   * </pre>
   *
   * <code>
   * optional .google.analytics.data.v1alpha.SegmentParameterFilterExpression segment_parameter_filter_expression = 2;
   * </code>
   */
  @java.lang.Override
  public com.google.analytics.data.v1alpha.SegmentParameterFilterExpressionOrBuilder
      getSegmentParameterFilterExpressionOrBuilder() {
    // Read-only view; same null-to-default substitution as the getter above.
    return segmentParameterFilterExpression_ == null
        ? com.google.analytics.data.v1alpha.SegmentParameterFilterExpression.getDefaultInstance()
        : segmentParameterFilterExpression_;
  }
  // Tri-state cache for isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No proto2-style required fields, so this message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only the fields whose presence bit is set (both are `optional`),
  // followed by preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, eventName_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getSegmentParameterFilterExpression());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes) the wire size; accounting mirrors writeTo().
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, eventName_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, getSegmentParameterFilterExpression());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality: field presence must match before values are compared,
  // and preserved unknown fields participate as well.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.data.v1alpha.SegmentEventFilter)) {
      return super.equals(obj);
    }
    com.google.analytics.data.v1alpha.SegmentEventFilter other =
        (com.google.analytics.data.v1alpha.SegmentEventFilter) obj;
    if (hasEventName() != other.hasEventName()) return false;
    if (hasEventName()) {
      if (!getEventName().equals(other.getEventName())) return false;
    }
    if (hasSegmentParameterFilterExpression() != other.hasSegmentParameterFilterExpression())
      return false;
    if (hasSegmentParameterFilterExpression()) {
      if (!getSegmentParameterFilterExpression()
          .equals(other.getSegmentParameterFilterExpression())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Hash is memoized in the base-class field; 0 means not yet computed.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard generated-code hash: seed with the descriptor, then fold in each
    // present field's number and value using the fixed 19/37/53/29 multipliers.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasEventName()) {
      hash = (37 * hash) + EVENT_NAME_FIELD_NUMBER;
      hash = (53 * hash) + getEventName().hashCode();
    }
    if (hasSegmentParameterFilterExpression()) {
      hash = (37 * hash) + SEGMENT_PARAMETER_FILTER_EXPRESSION_FIELD_NUMBER;
      hash = (53 * hash) + getSegmentParameterFilterExpression().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard generated parse entry points. All overloads delegate either to
  // PARSER directly (in-memory inputs) or to the GeneratedMessageV3 stream
  // helpers, which translate protobuf parse failures into the documented
  // InvalidProtocolBufferException / IOException signatures.
  // ---------------------------------------------------------------------------
  public static com.google.analytics.data.v1alpha.SegmentEventFilter parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1alpha.SegmentEventFilter parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1alpha.SegmentEventFilter parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1alpha.SegmentEventFilter parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1alpha.SegmentEventFilter parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1alpha.SegmentEventFilter parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1alpha.SegmentEventFilter parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1alpha.SegmentEventFilter parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.analytics.data.v1alpha.SegmentEventFilter parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1alpha.SegmentEventFilter parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.data.v1alpha.SegmentEventFilter parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1alpha.SegmentEventFilter parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Creates a builder starting from the default (empty) instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Creates a builder pre-populated with a copy of {@code prototype}'s fields.
  public static Builder newBuilder(com.google.analytics.data.v1alpha.SegmentEventFilter prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; otherwise copy this message's state.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Creates a filter that matches events of a single event name. If a parameter
* filter expression is specified, only the subset of events that match both the
* single event name and the parameter filter expressions match this event
* filter.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1alpha.SegmentEventFilter}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.data.v1alpha.SegmentEventFilter)
com.google.analytics.data.v1alpha.SegmentEventFilterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.data.v1alpha.ReportingApiProto
.internal_static_google_analytics_data_v1alpha_SegmentEventFilter_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.data.v1alpha.ReportingApiProto
.internal_static_google_analytics_data_v1alpha_SegmentEventFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.data.v1alpha.SegmentEventFilter.class,
com.google.analytics.data.v1alpha.SegmentEventFilter.Builder.class);
}
// Construct using com.google.analytics.data.v1alpha.SegmentEventFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSegmentParameterFilterExpressionFieldBuilder();
}
}
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset all presence bits and field storage back to the empty-message state.
      bitField0_ = 0;
      eventName_ = "";
      segmentParameterFilterExpression_ = null;
      // Dispose the nested-message sub-builder so it stops tracking this builder.
      if (segmentParameterFilterExpressionBuilder_ != null) {
        segmentParameterFilterExpressionBuilder_.dispose();
        segmentParameterFilterExpressionBuilder_ = null;
      }
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.data.v1alpha.ReportingApiProto
.internal_static_google_analytics_data_v1alpha_SegmentEventFilter_descriptor;
}
@java.lang.Override
public com.google.analytics.data.v1alpha.SegmentEventFilter getDefaultInstanceForType() {
return com.google.analytics.data.v1alpha.SegmentEventFilter.getDefaultInstance();
}
@java.lang.Override
public com.google.analytics.data.v1alpha.SegmentEventFilter build() {
com.google.analytics.data.v1alpha.SegmentEventFilter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    @java.lang.Override
    public Builder buildPartial() {
      com.google.analytics.data.v1alpha.SegmentEventFilter result =
          new com.google.analytics.data.v1alpha.SegmentEventFilter(this);
      // Only copy fields over when at least one presence bit is set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies each set field from the builder into {@code result}, translating the
    // builder's presence bits into the message's bitField0_.
    private void buildPartial0(com.google.analytics.data.v1alpha.SegmentEventFilter result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.eventName_ = eventName_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        // Prefer the sub-builder's built message when one exists; otherwise use the raw field.
        result.segmentParameterFilterExpression_ =
            segmentParameterFilterExpressionBuilder_ == null
                ? segmentParameterFilterExpression_
                : segmentParameterFilterExpressionBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.data.v1alpha.SegmentEventFilter) {
return mergeFrom((com.google.analytics.data.v1alpha.SegmentEventFilter) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges another SegmentEventFilter into this builder: each field present in
    // {@code other} overwrites (scalars) or recursively merges (messages) this builder's value.
    public Builder mergeFrom(com.google.analytics.data.v1alpha.SegmentEventFilter other) {
      if (other == com.google.analytics.data.v1alpha.SegmentEventFilter.getDefaultInstance())
        return this;
      if (other.hasEventName()) {
        eventName_ = other.eventName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasSegmentParameterFilterExpression()) {
        mergeSegmentParameterFilterExpression(other.getSegmentParameterFilterExpression());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        // Tag-dispatch loop: read wire tags until end-of-stream (tag 0) or an end-group tag.
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: // field 1, wire type 2 (length-delimited): event_name
              {
                eventName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18: // field 2, wire type 2: segment_parameter_filter_expression
              {
                input.readMessage(
                    getSegmentParameterFilterExpressionFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                // Preserve unrecognized fields; false indicates an end-group tag.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure so partially-merged state is observed.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object eventName_ = "";
/**
*
*
* <pre>
* This filter matches events of this single event name. Event name is
* required.
* </pre>
*
* <code>optional string event_name = 1;</code>
*
* @return Whether the eventName field is set.
*/
public boolean hasEventName() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* This filter matches events of this single event name. Event name is
* required.
* </pre>
*
* <code>optional string event_name = 1;</code>
*
* @return The eventName.
*/
public java.lang.String getEventName() {
java.lang.Object ref = eventName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
eventName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* This filter matches events of this single event name. Event name is
* required.
* </pre>
*
* <code>optional string event_name = 1;</code>
*
* @return The bytes for eventName.
*/
public com.google.protobuf.ByteString getEventNameBytes() {
java.lang.Object ref = eventName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
eventName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* This filter matches events of this single event name. Event name is
* required.
* </pre>
*
* <code>optional string event_name = 1;</code>
*
* @param value The eventName to set.
* @return This builder for chaining.
*/
public Builder setEventName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
eventName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* This filter matches events of this single event name. Event name is
* required.
* </pre>
*
* <code>optional string event_name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearEventName() {
eventName_ = getDefaultInstance().getEventName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* This filter matches events of this single event name. Event name is
* required.
* </pre>
*
* <code>optional string event_name = 1;</code>
*
* @param value The bytes for eventName to set.
* @return This builder for chaining.
*/
public Builder setEventNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
eventName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.analytics.data.v1alpha.SegmentParameterFilterExpression
segmentParameterFilterExpression_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.data.v1alpha.SegmentParameterFilterExpression,
com.google.analytics.data.v1alpha.SegmentParameterFilterExpression.Builder,
com.google.analytics.data.v1alpha.SegmentParameterFilterExpressionOrBuilder>
segmentParameterFilterExpressionBuilder_;
/**
*
*
* <pre>
* If specified, this filter matches events that match both the single event
* name and the parameter filter expressions.
*
* Inside the parameter filter expression, only parameter filters are
* available.
* </pre>
*
* <code>
* optional .google.analytics.data.v1alpha.SegmentParameterFilterExpression segment_parameter_filter_expression = 2;
* </code>
*
* @return Whether the segmentParameterFilterExpression field is set.
*/
public boolean hasSegmentParameterFilterExpression() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* If specified, this filter matches events that match both the single event
* name and the parameter filter expressions.
*
* Inside the parameter filter expression, only parameter filters are
* available.
* </pre>
*
* <code>
* optional .google.analytics.data.v1alpha.SegmentParameterFilterExpression segment_parameter_filter_expression = 2;
* </code>
*
* @return The segmentParameterFilterExpression.
*/
public com.google.analytics.data.v1alpha.SegmentParameterFilterExpression
getSegmentParameterFilterExpression() {
if (segmentParameterFilterExpressionBuilder_ == null) {
return segmentParameterFilterExpression_ == null
? com.google.analytics.data.v1alpha.SegmentParameterFilterExpression
.getDefaultInstance()
: segmentParameterFilterExpression_;
} else {
return segmentParameterFilterExpressionBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* If specified, this filter matches events that match both the single event
* name and the parameter filter expressions.
*
* Inside the parameter filter expression, only parameter filters are
* available.
* </pre>
*
* <code>
* optional .google.analytics.data.v1alpha.SegmentParameterFilterExpression segment_parameter_filter_expression = 2;
* </code>
*/
public Builder setSegmentParameterFilterExpression(
com.google.analytics.data.v1alpha.SegmentParameterFilterExpression value) {
if (segmentParameterFilterExpressionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
segmentParameterFilterExpression_ = value;
} else {
segmentParameterFilterExpressionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* If specified, this filter matches events that match both the single event
* name and the parameter filter expressions.
*
* Inside the parameter filter expression, only parameter filters are
* available.
* </pre>
*
* <code>
* optional .google.analytics.data.v1alpha.SegmentParameterFilterExpression segment_parameter_filter_expression = 2;
* </code>
*/
public Builder setSegmentParameterFilterExpression(
com.google.analytics.data.v1alpha.SegmentParameterFilterExpression.Builder
builderForValue) {
if (segmentParameterFilterExpressionBuilder_ == null) {
segmentParameterFilterExpression_ = builderForValue.build();
} else {
segmentParameterFilterExpressionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* If specified, this filter matches events that match both the single event
* name and the parameter filter expressions.
*
* Inside the parameter filter expression, only parameter filters are
* available.
* </pre>
*
* <code>
* optional .google.analytics.data.v1alpha.SegmentParameterFilterExpression segment_parameter_filter_expression = 2;
* </code>
*/
    /**
     *
     *
     * <pre>
     * If specified, this filter matches events that match both the single event
     * name and the parameter filter expressions.
     *
     * Inside the parameter filter expression, only parameter filters are
     * available.
     * </pre>
     *
     * <code>
     * optional .google.analytics.data.v1alpha.SegmentParameterFilterExpression segment_parameter_filter_expression = 2;
     * </code>
     */
    public Builder mergeSegmentParameterFilterExpression(
        com.google.analytics.data.v1alpha.SegmentParameterFilterExpression value) {
      if (segmentParameterFilterExpressionBuilder_ == null) {
        // Recursively merge only when a non-default value is already present;
        // otherwise simply adopt the incoming message.
        if (((bitField0_ & 0x00000002) != 0)
            && segmentParameterFilterExpression_ != null
            && segmentParameterFilterExpression_
                != com.google.analytics.data.v1alpha.SegmentParameterFilterExpression
                    .getDefaultInstance()) {
          getSegmentParameterFilterExpressionBuilder().mergeFrom(value);
        } else {
          segmentParameterFilterExpression_ = value;
        }
      } else {
        segmentParameterFilterExpressionBuilder_.mergeFrom(value);
      }
      if (segmentParameterFilterExpression_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* If specified, this filter matches events that match both the single event
* name and the parameter filter expressions.
*
* Inside the parameter filter expression, only parameter filters are
* available.
* </pre>
*
* <code>
* optional .google.analytics.data.v1alpha.SegmentParameterFilterExpression segment_parameter_filter_expression = 2;
* </code>
*/
public Builder clearSegmentParameterFilterExpression() {
bitField0_ = (bitField0_ & ~0x00000002);
segmentParameterFilterExpression_ = null;
if (segmentParameterFilterExpressionBuilder_ != null) {
segmentParameterFilterExpressionBuilder_.dispose();
segmentParameterFilterExpressionBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* If specified, this filter matches events that match both the single event
* name and the parameter filter expressions.
*
* Inside the parameter filter expression, only parameter filters are
* available.
* </pre>
*
* <code>
* optional .google.analytics.data.v1alpha.SegmentParameterFilterExpression segment_parameter_filter_expression = 2;
* </code>
*/
public com.google.analytics.data.v1alpha.SegmentParameterFilterExpression.Builder
getSegmentParameterFilterExpressionBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getSegmentParameterFilterExpressionFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* If specified, this filter matches events that match both the single event
* name and the parameter filter expressions.
*
* Inside the parameter filter expression, only parameter filters are
* available.
* </pre>
*
* <code>
* optional .google.analytics.data.v1alpha.SegmentParameterFilterExpression segment_parameter_filter_expression = 2;
* </code>
*/
public com.google.analytics.data.v1alpha.SegmentParameterFilterExpressionOrBuilder
getSegmentParameterFilterExpressionOrBuilder() {
if (segmentParameterFilterExpressionBuilder_ != null) {
return segmentParameterFilterExpressionBuilder_.getMessageOrBuilder();
} else {
return segmentParameterFilterExpression_ == null
? com.google.analytics.data.v1alpha.SegmentParameterFilterExpression
.getDefaultInstance()
: segmentParameterFilterExpression_;
}
}
/**
*
*
* <pre>
* If specified, this filter matches events that match both the single event
* name and the parameter filter expressions.
*
* Inside the parameter filter expression, only parameter filters are
* available.
* </pre>
*
* <code>
* optional .google.analytics.data.v1alpha.SegmentParameterFilterExpression segment_parameter_filter_expression = 2;
* </code>
*/
    // Lazily creates the single-field sub-builder for field 2. Once created, the
    // sub-builder owns the message state, so the raw field reference is cleared.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.analytics.data.v1alpha.SegmentParameterFilterExpression,
            com.google.analytics.data.v1alpha.SegmentParameterFilterExpression.Builder,
            com.google.analytics.data.v1alpha.SegmentParameterFilterExpressionOrBuilder>
        getSegmentParameterFilterExpressionFieldBuilder() {
      if (segmentParameterFilterExpressionBuilder_ == null) {
        segmentParameterFilterExpressionBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.analytics.data.v1alpha.SegmentParameterFilterExpression,
                com.google.analytics.data.v1alpha.SegmentParameterFilterExpression.Builder,
                com.google.analytics.data.v1alpha.SegmentParameterFilterExpressionOrBuilder>(
                getSegmentParameterFilterExpression(), getParentForChildren(), isClean());
        segmentParameterFilterExpression_ = null;
      }
      return segmentParameterFilterExpressionBuilder_;
    }
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.data.v1alpha.SegmentEventFilter)
}
// @@protoc_insertion_point(class_scope:google.analytics.data.v1alpha.SegmentEventFilter)
  // Shared immutable default (empty) instance; also serves as the builder's starting point.
  private static final com.google.analytics.data.v1alpha.SegmentEventFilter DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.analytics.data.v1alpha.SegmentEventFilter();
  }

  public static com.google.analytics.data.v1alpha.SegmentEventFilter getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom entry points; delegates to the builder's
  // mergeFrom and attaches the partially-built message to any parse failure.
  private static final com.google.protobuf.Parser<SegmentEventFilter> PARSER =
      new com.google.protobuf.AbstractParser<SegmentEventFilter>() {
        @java.lang.Override
        public SegmentEventFilter parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap raw I/O failures so callers only see InvalidProtocolBufferException.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<SegmentEventFilter> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SegmentEventFilter> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.analytics.data.v1alpha.SegmentEventFilter getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== File boundary: googleapis/google-cloud-java —
// java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ExportDataResponse.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/dataset_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for
* [DatasetService.ExportData][google.cloud.aiplatform.v1.DatasetService.ExportData].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ExportDataResponse}
*/
public final class ExportDataResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ExportDataResponse)
ExportDataResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ExportDataResponse.newBuilder() to construct.
  private ExportDataResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; repeated string field starts empty.
  private ExportDataResponse() {
    exportedFiles_ = com.google.protobuf.LazyStringArrayList.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ExportDataResponse();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ExportDataResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ExportDataResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ExportDataResponse.class,
com.google.cloud.aiplatform.v1.ExportDataResponse.Builder.class);
}
private int bitField0_;
public static final int EXPORTED_FILES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList exportedFiles_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
* <pre>
* All of the files that are exported in this export operation. For custom
* code training export, only three (training, validation and test)
* Cloud Storage paths in wildcard format are populated
* (for example, gs://.../training-*).
* </pre>
*
* <code>repeated string exported_files = 1;</code>
*
* @return A list containing the exportedFiles.
*/
public com.google.protobuf.ProtocolStringList getExportedFilesList() {
return exportedFiles_;
}
/**
*
*
* <pre>
* All of the files that are exported in this export operation. For custom
* code training export, only three (training, validation and test)
* Cloud Storage paths in wildcard format are populated
* (for example, gs://.../training-*).
* </pre>
*
* <code>repeated string exported_files = 1;</code>
*
* @return The count of exportedFiles.
*/
public int getExportedFilesCount() {
return exportedFiles_.size();
}
/**
*
*
* <pre>
* All of the files that are exported in this export operation. For custom
* code training export, only three (training, validation and test)
* Cloud Storage paths in wildcard format are populated
* (for example, gs://.../training-*).
* </pre>
*
* <code>repeated string exported_files = 1;</code>
*
* @param index The index of the element to return.
* @return The exportedFiles at the given index.
*/
public java.lang.String getExportedFiles(int index) {
return exportedFiles_.get(index);
}
/**
*
*
* <pre>
* All of the files that are exported in this export operation. For custom
* code training export, only three (training, validation and test)
* Cloud Storage paths in wildcard format are populated
* (for example, gs://.../training-*).
* </pre>
*
* <code>repeated string exported_files = 1;</code>
*
* @param index The index of the value to return.
* @return The bytes of the exportedFiles at the given index.
*/
public com.google.protobuf.ByteString getExportedFilesBytes(int index) {
return exportedFiles_.getByteString(index);
}
public static final int DATA_STATS_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1.Model.DataStats dataStats_;
/**
*
*
* <pre>
* Only present for custom code training export use case. Records data stats,
* i.e., train/validation/test item/annotation counts calculated during
* the export operation.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Model.DataStats data_stats = 2;</code>
*
* @return Whether the dataStats field is set.
*/
@java.lang.Override
public boolean hasDataStats() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Only present for custom code training export use case. Records data stats,
* i.e., train/validation/test item/annotation counts calculated during
* the export operation.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Model.DataStats data_stats = 2;</code>
*
* @return The dataStats.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.Model.DataStats getDataStats() {
return dataStats_ == null
? com.google.cloud.aiplatform.v1.Model.DataStats.getDefaultInstance()
: dataStats_;
}
/**
*
*
* <pre>
* Only present for custom code training export use case. Records data stats,
* i.e., train/validation/test item/annotation counts calculated during
* the export operation.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Model.DataStats data_stats = 2;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.Model.DataStatsOrBuilder getDataStatsOrBuilder() {
return dataStats_ == null
? com.google.cloud.aiplatform.v1.Model.DataStats.getDefaultInstance()
: dataStats_;
}
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields declared, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1: exported_files (repeated string) — one length-delimited entry per element.
    for (int i = 0; i < exportedFiles_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, exportedFiles_.getRaw(i));
    }
    // Field 2: data_stats (singular message) — written only when present.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getDataStats());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    {
      // Payload bytes of all repeated-string elements, plus one 1-byte tag per element
      // (field number 1 fits in a single tag byte).
      int dataSize = 0;
      for (int i = 0; i < exportedFiles_.size(); i++) {
        dataSize += computeStringSizeNoTag(exportedFiles_.getRaw(i));
      }
      size += dataSize;
      size += 1 * getExportedFilesList().size();
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getDataStats());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.ExportDataResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.ExportDataResponse other =
        (com.google.cloud.aiplatform.v1.ExportDataResponse) obj;
    // Repeated field is compared element-wise; optional message compares presence then value.
    if (!getExportedFilesList().equals(other.getExportedFilesList())) return false;
    if (hasDataStats() != other.hasDataStats()) return false;
    if (hasDataStats()) {
      if (!getDataStats().equals(other.getDataStats())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // 0 marks "not yet computed" in the memoized base-class field.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Repeated field contributes only when non-empty, mirroring equals().
    if (getExportedFilesCount() > 0) {
      hash = (37 * hash) + EXPORTED_FILES_FIELD_NUMBER;
      hash = (53 * hash) + getExportedFilesList().hashCode();
    }
    if (hasDataStats()) {
      hash = (37 * hash) + DATA_STATS_FIELD_NUMBER;
      hash = (53 * hash) + getDataStats().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard generated parse entry points: in-memory overloads delegate to
  // PARSER; stream overloads go through the GeneratedMessageV3 helpers, which
  // adapt parse failures to the declared exception types.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.aiplatform.v1.ExportDataResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.ExportDataResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.ExportDataResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.ExportDataResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.ExportDataResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.ExportDataResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.ExportDataResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.ExportDataResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.aiplatform.v1.ExportDataResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.ExportDataResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
public static com.google.cloud.aiplatform.v1.ExportDataResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ExportDataResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.aiplatform.v1.ExportDataResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [DatasetService.ExportData][google.cloud.aiplatform.v1.DatasetService.ExportData].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ExportDataResponse}
*/
// NOTE(review): protoc-generated builder — regenerate from the .proto rather
// than hand-editing; bitField0_ bookkeeping must stay in sync with buildPartial0.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ExportDataResponse)
com.google.cloud.aiplatform.v1.ExportDataResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ExportDataResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ExportDataResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ExportDataResponse.class,
com.google.cloud.aiplatform.v1.ExportDataResponse.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.ExportDataResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested-field builders when the runtime is configured to
// always use field builders (needed for parent/child change notification).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getDataStatsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
exportedFiles_ = com.google.protobuf.LazyStringArrayList.emptyList();
dataStats_ = null;
if (dataStatsBuilder_ != null) {
dataStatsBuilder_.dispose();
dataStatsBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.DatasetServiceProto
.internal_static_google_cloud_aiplatform_v1_ExportDataResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ExportDataResponse getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.ExportDataResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ExportDataResponse build() {
com.google.cloud.aiplatform.v1.ExportDataResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ExportDataResponse buildPartial() {
com.google.cloud.aiplatform.v1.ExportDataResponse result =
new com.google.cloud.aiplatform.v1.ExportDataResponse(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies only the fields whose presence bits are set into the new message.
private void buildPartial0(com.google.cloud.aiplatform.v1.ExportDataResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
exportedFiles_.makeImmutable();
result.exportedFiles_ = exportedFiles_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.dataStats_ = dataStatsBuilder_ == null ? dataStats_ : dataStatsBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.ExportDataResponse) {
return mergeFrom((com.google.cloud.aiplatform.v1.ExportDataResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merge semantics: repeated strings are appended; the singular message
// field is recursively merged when present on the other message.
public Builder mergeFrom(com.google.cloud.aiplatform.v1.ExportDataResponse other) {
if (other == com.google.cloud.aiplatform.v1.ExportDataResponse.getDefaultInstance())
return this;
if (!other.exportedFiles_.isEmpty()) {
if (exportedFiles_.isEmpty()) {
exportedFiles_ = other.exportedFiles_;
bitField0_ |= 0x00000001;
} else {
ensureExportedFilesIsMutable();
exportedFiles_.addAll(other.exportedFiles_);
}
onChanged();
}
if (other.hasDataStats()) {
mergeDataStats(other.getDataStats());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
ensureExportedFilesIsMutable();
exportedFiles_.add(s);
break;
} // case 10
case 18:
{
input.readMessage(getDataStatsFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Presence bits: 0x1 = exportedFiles_, 0x2 = dataStats_.
private int bitField0_;
private com.google.protobuf.LazyStringArrayList exportedFiles_ =
com.google.protobuf.LazyStringArrayList.emptyList();
// Copy-on-write: replaces a shared/immutable list with a private mutable copy
// before any in-place modification.
private void ensureExportedFilesIsMutable() {
if (!exportedFiles_.isModifiable()) {
exportedFiles_ = new com.google.protobuf.LazyStringArrayList(exportedFiles_);
}
bitField0_ |= 0x00000001;
}
/**
*
*
* <pre>
* All of the files that are exported in this export operation. For custom
* code training export, only three (training, validation and test)
* Cloud Storage paths in wildcard format are populated
* (for example, gs://.../training-*).
* </pre>
*
* <code>repeated string exported_files = 1;</code>
*
* @return A list containing the exportedFiles.
*/
public com.google.protobuf.ProtocolStringList getExportedFilesList() {
exportedFiles_.makeImmutable();
return exportedFiles_;
}
/**
*
*
* <pre>
* All of the files that are exported in this export operation. For custom
* code training export, only three (training, validation and test)
* Cloud Storage paths in wildcard format are populated
* (for example, gs://.../training-*).
* </pre>
*
* <code>repeated string exported_files = 1;</code>
*
* @return The count of exportedFiles.
*/
public int getExportedFilesCount() {
return exportedFiles_.size();
}
/**
*
*
* <pre>
* All of the files that are exported in this export operation. For custom
* code training export, only three (training, validation and test)
* Cloud Storage paths in wildcard format are populated
* (for example, gs://.../training-*).
* </pre>
*
* <code>repeated string exported_files = 1;</code>
*
* @param index The index of the element to return.
* @return The exportedFiles at the given index.
*/
public java.lang.String getExportedFiles(int index) {
return exportedFiles_.get(index);
}
/**
*
*
* <pre>
* All of the files that are exported in this export operation. For custom
* code training export, only three (training, validation and test)
* Cloud Storage paths in wildcard format are populated
* (for example, gs://.../training-*).
* </pre>
*
* <code>repeated string exported_files = 1;</code>
*
* @param index The index of the value to return.
* @return The bytes of the exportedFiles at the given index.
*/
public com.google.protobuf.ByteString getExportedFilesBytes(int index) {
return exportedFiles_.getByteString(index);
}
/**
*
*
* <pre>
* All of the files that are exported in this export operation. For custom
* code training export, only three (training, validation and test)
* Cloud Storage paths in wildcard format are populated
* (for example, gs://.../training-*).
* </pre>
*
* <code>repeated string exported_files = 1;</code>
*
* @param index The index to set the value at.
* @param value The exportedFiles to set.
* @return This builder for chaining.
*/
public Builder setExportedFiles(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureExportedFilesIsMutable();
exportedFiles_.set(index, value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* All of the files that are exported in this export operation. For custom
* code training export, only three (training, validation and test)
* Cloud Storage paths in wildcard format are populated
* (for example, gs://.../training-*).
* </pre>
*
* <code>repeated string exported_files = 1;</code>
*
* @param value The exportedFiles to add.
* @return This builder for chaining.
*/
public Builder addExportedFiles(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureExportedFilesIsMutable();
exportedFiles_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* All of the files that are exported in this export operation. For custom
* code training export, only three (training, validation and test)
* Cloud Storage paths in wildcard format are populated
* (for example, gs://.../training-*).
* </pre>
*
* <code>repeated string exported_files = 1;</code>
*
* @param values The exportedFiles to add.
* @return This builder for chaining.
*/
public Builder addAllExportedFiles(java.lang.Iterable<java.lang.String> values) {
ensureExportedFilesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, exportedFiles_);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* All of the files that are exported in this export operation. For custom
* code training export, only three (training, validation and test)
* Cloud Storage paths in wildcard format are populated
* (for example, gs://.../training-*).
* </pre>
*
* <code>repeated string exported_files = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearExportedFiles() {
exportedFiles_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
// (stray empty statement emitted by the generator; harmless)
;
onChanged();
return this;
}
/**
*
*
* <pre>
* All of the files that are exported in this export operation. For custom
* code training export, only three (training, validation and test)
* Cloud Storage paths in wildcard format are populated
* (for example, gs://.../training-*).
* </pre>
*
* <code>repeated string exported_files = 1;</code>
*
* @param value The bytes of the exportedFiles to add.
* @return This builder for chaining.
*/
public Builder addExportedFilesBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureExportedFilesIsMutable();
exportedFiles_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.aiplatform.v1.Model.DataStats dataStats_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.Model.DataStats,
com.google.cloud.aiplatform.v1.Model.DataStats.Builder,
com.google.cloud.aiplatform.v1.Model.DataStatsOrBuilder>
dataStatsBuilder_;
/**
*
*
* <pre>
* Only present for custom code training export use case. Records data stats,
* i.e., train/validation/test item/annotation counts calculated during
* the export operation.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Model.DataStats data_stats = 2;</code>
*
* @return Whether the dataStats field is set.
*/
public boolean hasDataStats() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Only present for custom code training export use case. Records data stats,
* i.e., train/validation/test item/annotation counts calculated during
* the export operation.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Model.DataStats data_stats = 2;</code>
*
* @return The dataStats.
*/
public com.google.cloud.aiplatform.v1.Model.DataStats getDataStats() {
if (dataStatsBuilder_ == null) {
return dataStats_ == null
? com.google.cloud.aiplatform.v1.Model.DataStats.getDefaultInstance()
: dataStats_;
} else {
return dataStatsBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Only present for custom code training export use case. Records data stats,
* i.e., train/validation/test item/annotation counts calculated during
* the export operation.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Model.DataStats data_stats = 2;</code>
*/
public Builder setDataStats(com.google.cloud.aiplatform.v1.Model.DataStats value) {
if (dataStatsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
dataStats_ = value;
} else {
dataStatsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Only present for custom code training export use case. Records data stats,
* i.e., train/validation/test item/annotation counts calculated during
* the export operation.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Model.DataStats data_stats = 2;</code>
*/
public Builder setDataStats(
com.google.cloud.aiplatform.v1.Model.DataStats.Builder builderForValue) {
if (dataStatsBuilder_ == null) {
dataStats_ = builderForValue.build();
} else {
dataStatsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Only present for custom code training export use case. Records data stats,
* i.e., train/validation/test item/annotation counts calculated during
* the export operation.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Model.DataStats data_stats = 2;</code>
*/
public Builder mergeDataStats(com.google.cloud.aiplatform.v1.Model.DataStats value) {
if (dataStatsBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& dataStats_ != null
&& dataStats_ != com.google.cloud.aiplatform.v1.Model.DataStats.getDefaultInstance()) {
getDataStatsBuilder().mergeFrom(value);
} else {
dataStats_ = value;
}
} else {
dataStatsBuilder_.mergeFrom(value);
}
if (dataStats_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Only present for custom code training export use case. Records data stats,
* i.e., train/validation/test item/annotation counts calculated during
* the export operation.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Model.DataStats data_stats = 2;</code>
*/
public Builder clearDataStats() {
bitField0_ = (bitField0_ & ~0x00000002);
dataStats_ = null;
if (dataStatsBuilder_ != null) {
dataStatsBuilder_.dispose();
dataStatsBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Only present for custom code training export use case. Records data stats,
* i.e., train/validation/test item/annotation counts calculated during
* the export operation.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Model.DataStats data_stats = 2;</code>
*/
public com.google.cloud.aiplatform.v1.Model.DataStats.Builder getDataStatsBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getDataStatsFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Only present for custom code training export use case. Records data stats,
* i.e., train/validation/test item/annotation counts calculated during
* the export operation.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Model.DataStats data_stats = 2;</code>
*/
public com.google.cloud.aiplatform.v1.Model.DataStatsOrBuilder getDataStatsOrBuilder() {
if (dataStatsBuilder_ != null) {
return dataStatsBuilder_.getMessageOrBuilder();
} else {
return dataStats_ == null
? com.google.cloud.aiplatform.v1.Model.DataStats.getDefaultInstance()
: dataStats_;
}
}
/**
*
*
* <pre>
* Only present for custom code training export use case. Records data stats,
* i.e., train/validation/test item/annotation counts calculated during
* the export operation.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.Model.DataStats data_stats = 2;</code>
*/
// Lazily creates the single-field builder; after creation the builder owns
// the field state and dataStats_ is cleared.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.Model.DataStats,
com.google.cloud.aiplatform.v1.Model.DataStats.Builder,
com.google.cloud.aiplatform.v1.Model.DataStatsOrBuilder>
getDataStatsFieldBuilder() {
if (dataStatsBuilder_ == null) {
dataStatsBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.Model.DataStats,
com.google.cloud.aiplatform.v1.Model.DataStats.Builder,
com.google.cloud.aiplatform.v1.Model.DataStatsOrBuilder>(
getDataStats(), getParentForChildren(), isClean());
dataStats_ = null;
}
return dataStatsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ExportDataResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ExportDataResponse)
// Shared immutable default instance; also serves as the prototype for
// newBuilder().
private static final com.google.cloud.aiplatform.v1.ExportDataResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ExportDataResponse();
}
public static com.google.cloud.aiplatform.v1.ExportDataResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser used by all parseFrom overloads; delegates to the builder's
// mergeFrom and returns a partial message even when parsing fails mid-stream.
private static final com.google.protobuf.Parser<ExportDataResponse> PARSER =
new com.google.protobuf.AbstractParser<ExportDataResponse>() {
@java.lang.Override
public ExportDataResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ExportDataResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ExportDataResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ExportDataResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gson.functional;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSyntaxException;
import com.google.gson.LongSerializationPolicy;
import com.google.gson.internal.LazilyParsedNumber;
import com.google.gson.reflect.TypeToken;
import java.io.Serializable;
import java.io.StringReader;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
/**
* Functional tests for Json primitive values: integers, and floating point numbers.
*
* @author Inderjeet Singh
* @author Joel Leitch
*/
public class PrimitiveTest {
// Default-configured Gson; recreated before every test. Individual tests may
// replace it (e.g. with serializeSpecialFloatingPointValues enabled).
private Gson gson;
@Before
public void setUp() throws Exception {
gson = new Gson();
}
// Autoboxed ints round-trip through bare JSON number literals, whether the
// target type is the primitive or the wrapper.
@Test
public void testPrimitiveIntegerAutoboxedSerialization() {
assertThat(gson.toJson(1)).isEqualTo("1");
}
@Test
public void testPrimitiveIntegerAutoboxedDeserialization() {
int expected = 1;
int actual = gson.fromJson("1", int.class);
assertThat(actual).isEqualTo(expected);
actual = gson.fromJson("1", Integer.class);
assertThat(actual).isEqualTo(expected);
}
@Test
public void testByteSerialization() {
assertThat(gson.toJson(1, byte.class)).isEqualTo("1");
assertThat(gson.toJson(1, Byte.class)).isEqualTo("1");
assertThat(gson.toJson(Byte.MIN_VALUE, Byte.class)).isEqualTo(Byte.toString(Byte.MIN_VALUE));
assertThat(gson.toJson(Byte.MAX_VALUE, Byte.class)).isEqualTo(Byte.toString(Byte.MAX_VALUE));
// Should perform narrowing conversion
assertThat(gson.toJson(128, Byte.class)).isEqualTo("-128");
assertThat(gson.toJson(1.5, Byte.class)).isEqualTo("1");
}
@Test
public void testByteDeserialization() {
Byte boxed = gson.fromJson("1", Byte.class);
assertThat(boxed).isEqualTo(1);
byte primitive = gson.fromJson("1", byte.class);
assertThat(primitive).isEqualTo(1);
// 255 is in byte's unsigned range and narrows to -1 on deserialization.
byte[] bytes = gson.fromJson("[-128, 0, 127, 255]", byte[].class);
assertThat(bytes).isEqualTo(new byte[] {-128, 0, 127, -1});
}
// Values outside [-128, 255] must be rejected rather than silently truncated.
@Test
public void testByteDeserializationLossy() {
JsonSyntaxException e =
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("-129", byte.class));
assertThat(e).hasMessageThat().isEqualTo("Lossy conversion from -129 to byte; at path $")
e = assertThrows(JsonSyntaxException.class, () -> gson.fromJson("256", byte.class));
assertThat(e).hasMessageThat().isEqualTo("Lossy conversion from 256 to byte; at path $");
e = assertThrows(JsonSyntaxException.class, () -> gson.fromJson("2147483648", byte.class));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"java.lang.NumberFormatException: Expected an int but was 2147483648"
+ " at line 1 column 11 path $");
}
@Test
public void testShortSerialization() {
assertThat(gson.toJson(1, short.class)).isEqualTo("1");
assertThat(gson.toJson(1, Short.class)).isEqualTo("1");
assertThat(gson.toJson(Short.MIN_VALUE, Short.class))
.isEqualTo(Short.toString(Short.MIN_VALUE));
assertThat(gson.toJson(Short.MAX_VALUE, Short.class))
.isEqualTo(Short.toString(Short.MAX_VALUE));
// Should perform widening conversion
assertThat(gson.toJson((byte) 1, Short.class)).isEqualTo("1");
// Should perform narrowing conversion
assertThat(gson.toJson(32768, Short.class)).isEqualTo("-32768");
assertThat(gson.toJson(1.5, Short.class)).isEqualTo("1");
}
@Test
public void testShortDeserialization() {
Short boxed = gson.fromJson("1", Short.class);
assertThat(boxed).isEqualTo(1);
short primitive = gson.fromJson("1", short.class);
assertThat(primitive).isEqualTo(1);
// 65535 is in short's unsigned range and narrows to -1 on deserialization.
short[] shorts = gson.fromJson("[-32768, 0, 32767, 65535]", short[].class);
assertThat(shorts).isEqualTo(new short[] {-32768, 0, 32767, -1});
}
// Values outside [-32768, 65535] must be rejected rather than silently truncated.
@Test
public void testShortDeserializationLossy() {
JsonSyntaxException e =
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("-32769", short.class));
assertThat(e).hasMessageThat().isEqualTo("Lossy conversion from -32769 to short; at path $");
e = assertThrows(JsonSyntaxException.class, () -> gson.fromJson("65536", short.class));
assertThat(e).hasMessageThat().isEqualTo("Lossy conversion from 65536 to short; at path $");
e = assertThrows(JsonSyntaxException.class, () -> gson.fromJson("2147483648", short.class));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"java.lang.NumberFormatException: Expected an int but was 2147483648"
+ " at line 1 column 11 path $");
}
@Test
public void testIntSerialization() {
assertThat(gson.toJson(1, int.class)).isEqualTo("1");
assertThat(gson.toJson(1, Integer.class)).isEqualTo("1");
assertThat(gson.toJson(Integer.MIN_VALUE, Integer.class))
.isEqualTo(Integer.toString(Integer.MIN_VALUE));
assertThat(gson.toJson(Integer.MAX_VALUE, Integer.class))
.isEqualTo(Integer.toString(Integer.MAX_VALUE));
// Should perform widening conversion
assertThat(gson.toJson((byte) 1, Integer.class)).isEqualTo("1");
// Should perform narrowing conversion
assertThat(gson.toJson(2147483648L, Integer.class)).isEqualTo("-2147483648");
assertThat(gson.toJson(1.5, Integer.class)).isEqualTo("1");
}
@Test
public void testLongSerialization() {
assertThat(gson.toJson(1L, long.class)).isEqualTo("1");
assertThat(gson.toJson(1L, Long.class)).isEqualTo("1");
assertThat(gson.toJson(Long.MIN_VALUE, Long.class)).isEqualTo(Long.toString(Long.MIN_VALUE));
assertThat(gson.toJson(Long.MAX_VALUE, Long.class)).isEqualTo(Long.toString(Long.MAX_VALUE));
// Should perform widening conversion
assertThat(gson.toJson((byte) 1, Long.class)).isEqualTo("1");
// Should perform narrowing conversion
assertThat(gson.toJson(1.5, Long.class)).isEqualTo("1");
}
@Test
public void testFloatSerialization() {
assertThat(gson.toJson(1.5f, float.class)).isEqualTo("1.5");
assertThat(gson.toJson(1.5f, Float.class)).isEqualTo("1.5");
assertThat(gson.toJson(Float.MIN_VALUE, Float.class))
.isEqualTo(Float.toString(Float.MIN_VALUE));
assertThat(gson.toJson(Float.MAX_VALUE, Float.class))
.isEqualTo(Float.toString(Float.MAX_VALUE));
// Should perform widening conversion
assertThat(gson.toJson((byte) 1, Float.class)).isEqualTo("1.0");
// (This widening conversion is actually lossy)
assertThat(gson.toJson(Long.MAX_VALUE - 10L, Float.class))
.isEqualTo(Float.toString((float) (Long.MAX_VALUE - 10L)));
// Should perform narrowing conversion
// Double.MAX_VALUE overflows float, producing Infinity; requires the
// special-floating-point-values mode to serialize.
gson = new GsonBuilder().serializeSpecialFloatingPointValues().create();
assertThat(gson.toJson(Double.MAX_VALUE, Float.class)).isEqualTo("Infinity");
}
@Test
public void testDoubleSerialization() {
assertThat(gson.toJson(1.5, double.class)).isEqualTo("1.5");
assertThat(gson.toJson(1.5, Double.class)).isEqualTo("1.5");
assertThat(gson.toJson(Double.MIN_VALUE, Double.class))
.isEqualTo(Double.toString(Double.MIN_VALUE));
assertThat(gson.toJson(Double.MAX_VALUE, Double.class))
.isEqualTo(Double.toString(Double.MAX_VALUE));
// Should perform widening conversion
assertThat(gson.toJson((byte) 1, Double.class)).isEqualTo("1.0");
// (This widening conversion is actually lossy)
assertThat(gson.toJson(Long.MAX_VALUE - 10L, Double.class))
.isEqualTo(Double.toString((double) (Long.MAX_VALUE - 10L)));
}
@Test
public void testPrimitiveIntegerAutoboxedInASingleElementArraySerialization() {
int[] target = {-9332};
assertThat(gson.toJson(target)).isEqualTo("[-9332]");
assertThat(gson.toJson(target, int[].class)).isEqualTo("[-9332]");
assertThat(gson.toJson(target, Integer[].class)).isEqualTo("[-9332]");
}
// Longs beyond int range must not lose precision.
@Test
public void testReallyLongValuesSerialization() {
long value = 333961828784581L;
assertThat(gson.toJson(value)).isEqualTo("333961828784581");
}
@Test
public void testReallyLongValuesDeserialization() {
String json = "333961828784581";
long value = gson.fromJson(json, Long.class);
assertThat(value).isEqualTo(333961828784581L);
}
@Test
public void testPrimitiveLongAutoboxedSerialization() {
assertThat(gson.toJson(1L, long.class)).isEqualTo("1");
assertThat(gson.toJson(1L, Long.class)).isEqualTo("1");
}
@Test
public void testPrimitiveLongAutoboxedDeserialization() {
long expected = 1L;
long actual = gson.fromJson("1", long.class);
assertThat(actual).isEqualTo(expected);
actual = gson.fromJson("1", Long.class);
assertThat(actual).isEqualTo(expected);
}
@Test
public void testPrimitiveLongAutoboxedInASingleElementArraySerialization() {
long[] target = {-23L};
assertThat(gson.toJson(target)).isEqualTo("[-23]");
assertThat(gson.toJson(target, long[].class)).isEqualTo("[-23]");
assertThat(gson.toJson(target, Long[].class)).isEqualTo("[-23]");
}
// Autoboxed booleans serialize to the bare JSON literals "true"/"false".
@Test
public void testPrimitiveBooleanAutoboxedSerialization() {
  for (boolean b : new boolean[] {true, false}) {
    assertThat(gson.toJson(b)).isEqualTo(Boolean.toString(b));
  }
}
// JSON boolean literals deserialize to the primitive boolean type.
@Test
public void testBooleanDeserialization() {
  boolean value = gson.fromJson("false", boolean.class);
  // Truth's dedicated boolean assertions (isFalse/isTrue) are the idiomatic
  // form and produce clearer failure messages than isEqualTo(false/true).
  assertThat(value).isFalse();
  value = gson.fromJson("true", boolean.class);
  assertThat(value).isTrue();
}
@Test
public void testPrimitiveBooleanAutoboxedInASingleElementArraySerialization() {
boolean[] target = {false};
assertThat(gson.toJson(target)).isEqualTo("[false]");
assertThat(gson.toJson(target, boolean[].class)).isEqualTo("[false]");
assertThat(gson.toJson(target, Boolean[].class)).isEqualTo("[false]");
}
@Test
public void testNumberSerialization() {
Number expected = 1L;
String json = gson.toJson(expected);
assertThat(json).isEqualTo(expected.toString());
json = gson.toJson(expected, Number.class);
assertThat(json).isEqualTo(expected.toString());
}
// Deserializing to the abstract Number type: the concrete runtime type is an
// implementation detail, so only numeric values are asserted here.
@Test
public void testNumberDeserialization() {
String json = "1";
Number expected = Integer.valueOf(json);
Number actual = gson.fromJson(json, Number.class);
assertThat(actual.intValue()).isEqualTo(expected.intValue());
json = String.valueOf(Long.MAX_VALUE);
expected = Long.valueOf(json);
actual = gson.fromJson(json, Number.class);
assertThat(actual.longValue()).isEqualTo(expected.longValue());
json = "1.0";
actual = gson.fromJson(json, Number.class);
assertThat(actual.longValue()).isEqualTo(1L);
}
// Gson is lenient here: a quoted numeric string still parses as a Number.
@Test
public void testNumberAsStringDeserialization() {
Number value = gson.fromJson("\"18\"", Number.class);
assertThat(value.intValue()).isEqualTo(18);
}
@Test
public void testPrimitiveDoubleAutoboxedSerialization() {
assertThat(gson.toJson(-122.08234335D)).isEqualTo("-122.08234335");
assertThat(gson.toJson(122.08112002D)).isEqualTo("122.08112002");
}
@Test
public void testPrimitiveDoubleAutoboxedDeserialization() {
double actual = gson.fromJson("-122.08858585", double.class);
assertThat(actual).isEqualTo(-122.08858585D);
// Trailing zeros in the JSON text do not affect the parsed double value.
actual = gson.fromJson("122.023900008000", Double.class);
assertThat(actual).isEqualTo(122.023900008D);
}
@Test
public void testPrimitiveDoubleAutoboxedInASingleElementArraySerialization() {
double[] target = {-122.08D};
assertThat(gson.toJson(target)).isEqualTo("[-122.08]");
assertThat(gson.toJson(target, double[].class)).isEqualTo("[-122.08]");
assertThat(gson.toJson(target, Double[].class)).isEqualTo("[-122.08]");
}
@Test
public void testDoubleAsStringRepresentationDeserialization() {
String doubleValue = "1.0043E+5";
Double expected = Double.valueOf(doubleValue);
Double actual = gson.fromJson(doubleValue, Double.class);
assertThat(actual).isEqualTo(expected);
double actual1 = gson.fromJson(doubleValue, double.class);
assertThat(actual1).isEqualTo(expected);
}
@Test
public void testDoubleNoFractAsStringRepresentationDeserialization() {
String doubleValue = "1E+5";
Double expected = Double.valueOf(doubleValue);
Double actual = gson.fromJson(doubleValue, Double.class);
assertThat(actual).isEqualTo(expected);
double actual1 = gson.fromJson(doubleValue, double.class);
assertThat(actual1).isEqualTo(expected);
}
@Test
public void testDoubleArrayDeserialization() {
String json =
"[0.0, 0.004761904761904762, 3.4013606962703525E-4, 7.936508173034305E-4,"
+ "0.0011904761904761906, 0.0]";
double[] values = gson.fromJson(json, double[].class);
assertThat(values).hasLength(6);
assertThat(values[0]).isEqualTo(0.0);
assertThat(values[1]).isEqualTo(0.004761904761904762);
assertThat(values[2]).isEqualTo(3.4013606962703525E-4);
assertThat(values[3]).isEqualTo(7.936508173034305E-4);
assertThat(values[4]).isEqualTo(0.0011904761904761906);
assertThat(values[5]).isEqualTo(0.0);
}
@Test
public void testLargeDoubleDeserialization() {
String doubleValue = "1.234567899E8";
Double expected = Double.valueOf(doubleValue);
Double actual = gson.fromJson(doubleValue, Double.class);
assertThat(actual).isEqualTo(expected);
double actual1 = gson.fromJson(doubleValue, double.class);
assertThat(actual1).isEqualTo(expected);
}
@Test
public void testBigDecimalSerialization() {
BigDecimal target = new BigDecimal("-122.0e-21");
String json = gson.toJson(target);
assertThat(new BigDecimal(json)).isEqualTo(target);
}
@Test
public void testBigDecimalDeserialization() {
BigDecimal target = new BigDecimal("-122.0e-21");
String json = "-122.0e-21";
assertThat(gson.fromJson(json, BigDecimal.class)).isEqualTo(target);
}
@Test
public void testBigDecimalInASingleElementArraySerialization() {
BigDecimal[] target = {new BigDecimal("-122.08e-21")};
String json = gson.toJson(target);
String actual = extractElementFromArray(json);
assertThat(new BigDecimal(actual)).isEqualTo(target[0]);
json = gson.toJson(target, BigDecimal[].class);
actual = extractElementFromArray(json);
assertThat(new BigDecimal(actual)).isEqualTo(target[0]);
}
@Test
public void testSmallValueForBigDecimalSerialization() {
BigDecimal target = new BigDecimal("1.55");
String actual = gson.toJson(target);
assertThat(actual).isEqualTo(target.toString());
}
@Test
public void testSmallValueForBigDecimalDeserialization() {
BigDecimal expected = new BigDecimal("1.55");
BigDecimal actual = gson.fromJson("1.55", BigDecimal.class);
assertThat(actual).isEqualTo(expected);
}
@Test
public void testBigDecimalPreservePrecisionSerialization() {
String expectedValue = "1.000";
BigDecimal obj = new BigDecimal(expectedValue);
String actualValue = gson.toJson(obj);
assertThat(actualValue).isEqualTo(expectedValue);
}
@Test
public void testBigDecimalPreservePrecisionDeserialization() {
String json = "1.000";
BigDecimal expected = new BigDecimal(json);
BigDecimal actual = gson.fromJson(json, BigDecimal.class);
assertThat(actual).isEqualTo(expected);
}
@Test
public void testBigDecimalAsStringRepresentationDeserialization() {
String doubleValue = "0.05E+5";
BigDecimal expected = new BigDecimal(doubleValue);
BigDecimal actual = gson.fromJson(doubleValue, BigDecimal.class);
assertThat(actual).isEqualTo(expected);
}
@Test
public void testBigDecimalNoFractAsStringRepresentationDeserialization() {
String doubleValue = "5E+5";
BigDecimal expected = new BigDecimal(doubleValue);
BigDecimal actual = gson.fromJson(doubleValue, BigDecimal.class);
assertThat(actual).isEqualTo(expected);
}
@Test
public void testBigIntegerSerialization() {
BigInteger target = new BigInteger("12121211243123245845384534687435634558945453489543985435");
assertThat(gson.toJson(target)).isEqualTo(target.toString());
}
@Test
public void testBigIntegerDeserialization() {
String json = "12121211243123245845384534687435634558945453489543985435";
BigInteger target = new BigInteger(json);
assertThat(gson.fromJson(json, BigInteger.class)).isEqualTo(target);
}
@Test
public void testBigIntegerInASingleElementArraySerialization() {
BigInteger[] target = {new BigInteger("1212121243434324323254365345367456456456465464564564")};
String json = gson.toJson(target);
String actual = extractElementFromArray(json);
assertThat(new BigInteger(actual)).isEqualTo(target[0]);
json = gson.toJson(target, BigInteger[].class);
actual = extractElementFromArray(json);
assertThat(new BigInteger(actual)).isEqualTo(target[0]);
}
@Test
public void testSmallValueForBigIntegerSerialization() {
BigInteger target = new BigInteger("15");
String actual = gson.toJson(target);
assertThat(actual).isEqualTo(target.toString());
}
@Test
public void testSmallValueForBigIntegerDeserialization() {
BigInteger expected = new BigInteger("15");
BigInteger actual = gson.fromJson("15", BigInteger.class);
assertThat(actual).isEqualTo(expected);
}
@Test
public void testBadValueForBigIntegerDeserialization() {
// BigInteger can not be decimal values
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("15.099", BigInteger.class));
}
@Test
public void testLazilyParsedNumberSerialization() {
LazilyParsedNumber target = new LazilyParsedNumber("1.5");
String actual = gson.toJson(target);
assertThat(actual).isEqualTo("1.5");
}
@Test
public void testLazilyParsedNumberDeserialization() {
LazilyParsedNumber expected = new LazilyParsedNumber("1.5");
LazilyParsedNumber actual = gson.fromJson("1.5", LazilyParsedNumber.class);
assertThat(actual).isEqualTo(expected);
}
@Test
public void testMoreSpecificSerialization() {
Gson gson = new Gson();
String expected = "This is a string";
String expectedJson = gson.toJson(expected);
Serializable serializableString = expected;
String actualJson = gson.toJson(serializableString, Serializable.class);
assertThat(actualJson).isNotEqualTo(expectedJson);
}
private static String extractElementFromArray(String json) {
return json.substring(json.indexOf('[') + 1, json.lastIndexOf(']'));
}
@Test
public void testDoubleNaNSerializationNotSupportedByDefault() {
String expectedMessage =
"NaN is not a valid double value as per JSON specification. To override this behavior,"
+ " use GsonBuilder.serializeSpecialFloatingPointValues() method.";
var e =
assertThrows(IllegalArgumentException.class, () -> gson.toJson(Double.NaN, double.class));
assertThat(e).hasMessageThat().isEqualTo(expectedMessage);
e = assertThrows(IllegalArgumentException.class, () -> gson.toJson(Double.NaN, Double.class));
assertThat(e).hasMessageThat().isEqualTo(expectedMessage);
}
@Test
public void testDoubleNaNSerialization() {
Gson gson = new GsonBuilder().serializeSpecialFloatingPointValues().create();
assertThat(gson.toJson(Double.NaN, double.class)).isEqualTo("NaN");
assertThat(gson.toJson(Double.NaN, Double.class)).isEqualTo("NaN");
}
@Test
public void testDoubleNaNDeserialization() {
assertThat(gson.fromJson("NaN", double.class)).isNaN();
assertThat(gson.fromJson("NaN", Double.class)).isNaN();
}
@Test
public void testFloatNaNSerializationNotSupportedByDefault() {
String expectedMessage =
"NaN is not a valid double value as per JSON specification. To override this behavior,"
+ " use GsonBuilder.serializeSpecialFloatingPointValues() method.";
var e = assertThrows(IllegalArgumentException.class, () -> gson.toJson(Float.NaN, float.class));
assertThat(e).hasMessageThat().isEqualTo(expectedMessage);
e = assertThrows(IllegalArgumentException.class, () -> gson.toJson(Float.NaN, Float.class));
assertThat(e).hasMessageThat().isEqualTo(expectedMessage);
}
@Test
public void testFloatNaNSerialization() {
Gson gson = new GsonBuilder().serializeSpecialFloatingPointValues().create();
assertThat(gson.toJson(Float.NaN, float.class)).isEqualTo("NaN");
assertThat(gson.toJson(Float.NaN, Float.class)).isEqualTo("NaN");
}
@Test
public void testFloatNaNDeserialization() {
assertThat(gson.fromJson("NaN", float.class)).isNaN();
assertThat(gson.fromJson("NaN", Float.class)).isNaN();
}
@Test
public void testBigDecimalNaNDeserializationNotSupported() {
// Gson should not accept NaN for deserialization of BigDecimal
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("NaN", BigDecimal.class));
}
@Test
public void testDoubleInfinitySerializationNotSupportedByDefault() {
String expectedMessage =
"Infinity is not a valid double value as per JSON specification. To override this"
+ " behavior, use GsonBuilder.serializeSpecialFloatingPointValues() method.";
var e =
assertThrows(
IllegalArgumentException.class,
() -> gson.toJson(Double.POSITIVE_INFINITY, double.class));
assertThat(e).hasMessageThat().isEqualTo(expectedMessage);
e =
assertThrows(
IllegalArgumentException.class,
() -> gson.toJson(Double.POSITIVE_INFINITY, Double.class));
assertThat(e).hasMessageThat().isEqualTo(expectedMessage);
}
@Test
public void testDoubleInfinitySerialization() {
Gson gson = new GsonBuilder().serializeSpecialFloatingPointValues().create();
assertThat(gson.toJson(Double.POSITIVE_INFINITY, double.class)).isEqualTo("Infinity");
assertThat(gson.toJson(Double.POSITIVE_INFINITY, Double.class)).isEqualTo("Infinity");
}
@Test
public void testDoubleInfinityDeserialization() {
assertThat(gson.fromJson("Infinity", double.class)).isPositiveInfinity();
assertThat(gson.fromJson("Infinity", Double.class)).isPositiveInfinity();
}
@Test
public void testFloatInfinitySerializationNotSupportedByDefault() {
String expectedMessage =
"Infinity is not a valid double value as per JSON specification. To override this"
+ " behavior, use GsonBuilder.serializeSpecialFloatingPointValues() method.";
var e =
assertThrows(
IllegalArgumentException.class,
() -> gson.toJson(Float.POSITIVE_INFINITY, float.class));
assertThat(e).hasMessageThat().isEqualTo(expectedMessage);
e =
assertThrows(
IllegalArgumentException.class,
() -> gson.toJson(Float.POSITIVE_INFINITY, Float.class));
assertThat(e).hasMessageThat().isEqualTo(expectedMessage);
}
@Test
public void testFloatInfinitySerialization() {
Gson gson = new GsonBuilder().serializeSpecialFloatingPointValues().create();
assertThat(gson.toJson(Float.POSITIVE_INFINITY, float.class)).isEqualTo("Infinity");
assertThat(gson.toJson(Float.POSITIVE_INFINITY, Float.class)).isEqualTo("Infinity");
}
@Test
public void testFloatInfinityDeserialization() {
assertThat(gson.fromJson("Infinity", float.class)).isPositiveInfinity();
assertThat(gson.fromJson("Infinity", Float.class)).isPositiveInfinity();
}
@Test
public void testBigDecimalInfinityDeserializationNotSupported() {
// Gson should not accept positive infinity for deserialization of BigDecimal
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("Infinity", BigDecimal.class));
}
@Test
public void testNegativeInfinitySerializationNotSupportedByDefault() {
String expectedMessage =
"-Infinity is not a valid double value as per JSON specification. To override this"
+ " behavior, use GsonBuilder.serializeSpecialFloatingPointValues() method.";
var e =
assertThrows(
IllegalArgumentException.class,
() -> gson.toJson(Double.NEGATIVE_INFINITY, double.class));
assertThat(e).hasMessageThat().isEqualTo(expectedMessage);
e =
assertThrows(
IllegalArgumentException.class,
() -> gson.toJson(Double.NEGATIVE_INFINITY, Double.class));
assertThat(e).hasMessageThat().isEqualTo(expectedMessage);
}
@Test
public void testNegativeInfinitySerialization() {
Gson gson = new GsonBuilder().serializeSpecialFloatingPointValues().create();
assertThat(gson.toJson(Double.NEGATIVE_INFINITY, double.class)).isEqualTo("-Infinity");
assertThat(gson.toJson(Double.NEGATIVE_INFINITY, Double.class)).isEqualTo("-Infinity");
}
@Test
public void testNegativeInfinityDeserialization() {
assertThat(gson.fromJson("-Infinity", double.class)).isNegativeInfinity();
assertThat(gson.fromJson("-Infinity", Double.class)).isNegativeInfinity();
}
@Test
public void testNegativeInfinityFloatSerializationNotSupportedByDefault() {
String expectedMessage =
"-Infinity is not a valid double value as per JSON specification. To override this"
+ " behavior, use GsonBuilder.serializeSpecialFloatingPointValues() method.";
var e =
assertThrows(
IllegalArgumentException.class,
() -> gson.toJson(Float.NEGATIVE_INFINITY, float.class));
assertThat(e).hasMessageThat().isEqualTo(expectedMessage);
e =
assertThrows(
IllegalArgumentException.class,
() -> gson.toJson(Float.NEGATIVE_INFINITY, Float.class));
assertThat(e).hasMessageThat().isEqualTo(expectedMessage);
}
@Test
public void testNegativeInfinityFloatSerialization() {
Gson gson = new GsonBuilder().serializeSpecialFloatingPointValues().create();
assertThat(gson.toJson(Float.NEGATIVE_INFINITY, float.class)).isEqualTo("-Infinity");
assertThat(gson.toJson(Float.NEGATIVE_INFINITY, Float.class)).isEqualTo("-Infinity");
}
@Test
public void testNegativeInfinityFloatDeserialization() {
assertThat(gson.fromJson("-Infinity", float.class)).isNegativeInfinity();
assertThat(gson.fromJson("-Infinity", Float.class)).isNegativeInfinity();
}
@Test
public void testBigDecimalNegativeInfinityDeserializationNotSupported() {
// Gson should not accept negative infinity for deserialization of BigDecimal
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("-Infinity", BigDecimal.class));
}
@Test
public void testLongAsStringSerialization() {
gson = new GsonBuilder().setLongSerializationPolicy(LongSerializationPolicy.STRING).create();
String result = gson.toJson(15L);
assertThat(result).isEqualTo("\"15\"");
// Test with an integer and ensure its still a number
result = gson.toJson(2);
assertThat(result).isEqualTo("2");
}
@Test
public void testLongAsStringDeserialization() {
long value = gson.fromJson("\"15\"", long.class);
assertThat(value).isEqualTo(15);
gson = new GsonBuilder().setLongSerializationPolicy(LongSerializationPolicy.STRING).create();
value = gson.fromJson("\"25\"", long.class);
assertThat(value).isEqualTo(25);
}
@Test
public void testQuotedStringSerializationAndDeserialization() {
String value = "String Blah Blah Blah...1, 2, 3";
String serializedForm = gson.toJson(value);
assertThat(serializedForm).isEqualTo("\"" + value + "\"");
String actual = gson.fromJson(serializedForm, String.class);
assertThat(actual).isEqualTo(value);
}
@Test
public void testUnquotedStringDeserializationFails() {
assertThat(gson.fromJson("UnquotedSingleWord", String.class)).isEqualTo("UnquotedSingleWord");
String value = "String Blah Blah Blah...1, 2, 3";
assertThrows(JsonSyntaxException.class, () -> gson.fromJson(value, String.class));
}
@Test
public void testHtmlCharacterSerialization() {
String target = "<script>var a = 12;</script>";
String result = gson.toJson(target);
assertThat(result).isNotEqualTo('"' + target + '"');
gson = new GsonBuilder().disableHtmlEscaping().create();
result = gson.toJson(target);
assertThat(result).isEqualTo('"' + target + '"');
}
@Test
public void testDeserializePrimitiveWrapperAsObjectField() {
String json = "{i:10}";
ClassWithIntegerField target = gson.fromJson(json, ClassWithIntegerField.class);
assertThat(target.i).isEqualTo(10);
}
private static class ClassWithIntegerField {
Integer i;
}
@Test
public void testPrimitiveClassLiteral() {
assertThat(gson.fromJson("1", int.class)).isEqualTo(1);
assertThat(gson.fromJson(new StringReader("1"), int.class)).isEqualTo(1);
assertThat(gson.fromJson(new JsonPrimitive(1), int.class)).isEqualTo(1);
}
@Test
public void testDeserializeJsonObjectAsLongPrimitive() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("{'abc':1}", long.class));
}
@Test
public void testDeserializeJsonArrayAsLongWrapper() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("[1,2,3]", Long.class));
}
@Test
public void testDeserializeJsonArrayAsInt() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("[1, 2, 3, 4]", int.class));
}
@Test
public void testDeserializeJsonObjectAsInteger() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("{}", Integer.class));
}
@Test
public void testDeserializeJsonObjectAsShortPrimitive() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("{'abc':1}", short.class));
}
@Test
public void testDeserializeJsonArrayAsShortWrapper() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("['a','b']", Short.class));
}
@Test
public void testDeserializeJsonArrayAsDoublePrimitive() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("[1,2]", double.class));
}
@Test
public void testDeserializeJsonObjectAsDoubleWrapper() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("{'abc':1}", Double.class));
}
@Test
public void testDeserializeJsonObjectAsFloatPrimitive() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("{'abc':1}", float.class));
}
@Test
public void testDeserializeJsonArrayAsFloatWrapper() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("[1,2,3]", Float.class));
}
@Test
public void testDeserializeJsonObjectAsBytePrimitive() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("{'abc':1}", byte.class));
}
@Test
public void testDeserializeJsonArrayAsByteWrapper() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("[1,2,3,4]", Byte.class));
}
@Test
public void testDeserializeJsonObjectAsBooleanPrimitive() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("{'abc':1}", boolean.class));
}
@Test
public void testDeserializeJsonArrayAsBooleanWrapper() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("[1,2,3,4]", Boolean.class));
}
@Test
public void testDeserializeJsonArrayAsBigDecimal() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("[1,2,3,4]", BigDecimal.class));
}
@Test
public void testDeserializeJsonObjectAsBigDecimal() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("{'a':1}", BigDecimal.class));
}
@Test
public void testDeserializeJsonArrayAsBigInteger() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("[1,2,3,4]", BigInteger.class));
}
@Test
public void testDeserializeJsonObjectAsBigInteger() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("{'c':2}", BigInteger.class));
}
@Test
public void testDeserializeJsonArrayAsNumber() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("[1,2,3,4]", Number.class));
}
@Test
public void testDeserializeJsonObjectAsNumber() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("{'c':2}", Number.class));
}
@Test
public void testDeserializingDecimalPointValueZeroSucceeds() {
assertThat(gson.fromJson("1.0", Integer.class)).isEqualTo(1);
}
@Test
public void testDeserializingNonZeroDecimalPointValuesAsIntegerFails() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("1.02", Byte.class));
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("1.02", Short.class));
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("1.02", Integer.class));
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("1.02", Long.class));
}
@Test
public void testDeserializingBigDecimalAsIntegerFails() {
JsonSyntaxException e =
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("-122.08e-213", Integer.class));
assertThat(e)
.hasCauseThat()
.hasMessageThat()
.isEqualTo("Expected an int but was -122.08e-213 at line 1 column 13 path $");
}
@Test
public void testDeserializingBigIntegerAsInteger() {
String number = "12121211243123245845384534687435634558945453489543985435";
JsonSyntaxException e =
assertThrows(JsonSyntaxException.class, () -> gson.fromJson(number, Integer.class));
assertThat(e)
.hasCauseThat()
.hasMessageThat()
.isEqualTo("Expected an int but was " + number + " at line 1 column 57 path $");
}
@Test
public void testDeserializingBigIntegerAsLong() {
String number = "12121211243123245845384534687435634558945453489543985435";
JsonSyntaxException e =
assertThrows(JsonSyntaxException.class, () -> gson.fromJson(number, Long.class));
assertThat(e)
.hasCauseThat()
.hasMessageThat()
.isEqualTo("Expected a long but was " + number + " at line 1 column 57 path $");
}
@Test
public void testValueVeryCloseToZeroIsZero() {
assertThat(gson.fromJson("-122.08e-2132", byte.class)).isEqualTo(0);
assertThat(gson.fromJson("-122.08e-2132", short.class)).isEqualTo(0);
assertThat(gson.fromJson("-122.08e-2132", int.class)).isEqualTo(0);
assertThat(gson.fromJson("-122.08e-2132", long.class)).isEqualTo(0);
assertThat(gson.fromJson("-122.08e-2132", float.class)).isEqualTo(-0.0f);
assertThat(gson.fromJson("-122.08e-2132", double.class)).isEqualTo(-0.0);
assertThat(gson.fromJson("122.08e-2132", float.class)).isEqualTo(0.0f);
assertThat(gson.fromJson("122.08e-2132", double.class)).isEqualTo(0.0);
}
@Test
public void testDeserializingBigDecimalAsBigIntegerFails() {
assertThrows(JsonSyntaxException.class, () -> gson.fromJson("-122.08e-213", BigInteger.class));
}
@Test
public void testDeserializingBigIntegerAsBigDecimal() {
BigDecimal actual =
gson.fromJson("12121211243123245845384534687435634558945453489543985435", BigDecimal.class);
assertThat(actual.toPlainString())
.isEqualTo("12121211243123245845384534687435634558945453489543985435");
}
@Test
public void testStringsAsBooleans() {
String json = "['true', 'false', 'TRUE', 'yes', '1']";
List<Boolean> deserialized = gson.fromJson(json, new TypeToken<List<Boolean>>() {});
assertThat(deserialized).isEqualTo(Arrays.asList(true, false, true, false, false));
}
}
|
googleapis/google-cloud-java | 36,143 | java-container/proto-google-cloud-container-v1/src/main/java/com/google/container/v1/AdvancedMachineFeatures.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1;
/**
*
*
* <pre>
* Specifies options for controlling advanced machine features.
* </pre>
*
* Protobuf type {@code google.container.v1.AdvancedMachineFeatures}
*/
public final class AdvancedMachineFeatures extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.container.v1.AdvancedMachineFeatures)
AdvancedMachineFeaturesOrBuilder {
private static final long serialVersionUID = 0L;
  // NOTE(review): this file is emitted by the protocol buffer compiler ("DO NOT
  // EDIT"); the comments below are review annotations only and will be lost on
  // regeneration.
  // Use AdvancedMachineFeatures.newBuilder() to construct.
  private AdvancedMachineFeatures(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor: initializes the enum-backed field to its default wire
  // value (0 = PERFORMANCE_MONITORING_UNIT_UNSPECIFIED).
  private AdvancedMachineFeatures() {
    performanceMonitoringUnit_ = 0;
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new AdvancedMachineFeatures();
  }
  // Reflection plumbing: message descriptor from the generated file descriptor.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.container.v1.ClusterServiceProto
        .internal_static_google_container_v1_AdvancedMachineFeatures_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.container.v1.ClusterServiceProto
        .internal_static_google_container_v1_AdvancedMachineFeatures_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.container.v1.AdvancedMachineFeatures.class,
            com.google.container.v1.AdvancedMachineFeatures.Builder.class);
  }
  /**
   *
   *
   * <pre>
   * Level of PMU access.
   * </pre>
   *
   * Protobuf enum {@code google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit}
   */
  public enum PerformanceMonitoringUnit implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * PMU not enabled.
     * </pre>
     *
     * <code>PERFORMANCE_MONITORING_UNIT_UNSPECIFIED = 0;</code>
     */
    PERFORMANCE_MONITORING_UNIT_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Architecturally defined non-LLC events.
     * </pre>
     *
     * <code>ARCHITECTURAL = 1;</code>
     */
    ARCHITECTURAL(1),
    /**
     *
     *
     * <pre>
     * Most documented core/L2 events.
     * </pre>
     *
     * <code>STANDARD = 2;</code>
     */
    STANDARD(2),
    /**
     *
     *
     * <pre>
     * Most documented core/L2 and LLC events.
     * </pre>
     *
     * <code>ENHANCED = 3;</code>
     */
    ENHANCED(3),
    // Sentinel for wire values with no matching constant; -1 is never a valid
    // wire number (getNumber() throws for it).
    UNRECOGNIZED(-1),
    ;
    /**
     *
     *
     * <pre>
     * PMU not enabled.
     * </pre>
     *
     * <code>PERFORMANCE_MONITORING_UNIT_UNSPECIFIED = 0;</code>
     */
    public static final int PERFORMANCE_MONITORING_UNIT_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * Architecturally defined non-LLC events.
     * </pre>
     *
     * <code>ARCHITECTURAL = 1;</code>
     */
    public static final int ARCHITECTURAL_VALUE = 1;
    /**
     *
     *
     * <pre>
     * Most documented core/L2 events.
     * </pre>
     *
     * <code>STANDARD = 2;</code>
     */
    public static final int STANDARD_VALUE = 2;
    /**
     *
     *
     * <pre>
     * Most documented core/L2 and LLC events.
     * </pre>
     *
     * <code>ENHANCED = 3;</code>
     */
    public static final int ENHANCED_VALUE = 3;
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static PerformanceMonitoringUnit valueOf(int value) {
      return forNumber(value);
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static PerformanceMonitoringUnit forNumber(int value) {
      // Returns null (not UNRECOGNIZED) for numbers unknown to this version.
      switch (value) {
        case 0:
          return PERFORMANCE_MONITORING_UNIT_UNSPECIFIED;
        case 1:
          return ARCHITECTURAL;
        case 2:
          return STANDARD;
        case 3:
          return ENHANCED;
        default:
          return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<PerformanceMonitoringUnit>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<PerformanceMonitoringUnit>
        internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<PerformanceMonitoringUnit>() {
              public PerformanceMonitoringUnit findValueByNumber(int number) {
                return PerformanceMonitoringUnit.forNumber(number);
              }
            };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.container.v1.AdvancedMachineFeatures.getDescriptor().getEnumTypes().get(0);
    }
    // Cached values() array; indexed by descriptor index in valueOf(desc) below.
    private static final PerformanceMonitoringUnit[] VALUES = values();
    public static PerformanceMonitoringUnit valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    private final int value;
    private PerformanceMonitoringUnit(int value) {
      this.value = value;
    }
    // @@protoc_insertion_point(enum_scope:google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit)
  }
private int bitField0_;
public static final int THREADS_PER_CORE_FIELD_NUMBER = 1;
private long threadsPerCore_ = 0L;
/**
*
*
* <pre>
* The number of threads per physical core. To disable simultaneous
* multithreading (SMT) set this to 1. If unset, the maximum number of threads
* supported per core by the underlying processor is assumed.
* </pre>
*
* <code>optional int64 threads_per_core = 1;</code>
*
* @return Whether the threadsPerCore field is set.
*/
@java.lang.Override
public boolean hasThreadsPerCore() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The number of threads per physical core. To disable simultaneous
* multithreading (SMT) set this to 1. If unset, the maximum number of threads
* supported per core by the underlying processor is assumed.
* </pre>
*
* <code>optional int64 threads_per_core = 1;</code>
*
* @return The threadsPerCore.
*/
@java.lang.Override
public long getThreadsPerCore() {
  return threadsPerCore_;
}

// --- Field 2: enable_nested_virtualization (presence tracked by bit 0x2 of bitField0_) ---
public static final int ENABLE_NESTED_VIRTUALIZATION_FIELD_NUMBER = 2;
private boolean enableNestedVirtualization_ = false;
/**
 *
 *
 * <pre>
 * Whether or not to enable nested virtualization (defaults to false).
 * </pre>
 *
 * <code>optional bool enable_nested_virtualization = 2;</code>
 *
 * @return Whether the enableNestedVirtualization field is set.
 */
@java.lang.Override
public boolean hasEnableNestedVirtualization() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 *
 *
 * <pre>
 * Whether or not to enable nested virtualization (defaults to false).
 * </pre>
 *
 * <code>optional bool enable_nested_virtualization = 2;</code>
 *
 * @return The enableNestedVirtualization.
 */
@java.lang.Override
public boolean getEnableNestedVirtualization() {
  return enableNestedVirtualization_;
}

// --- Field 3: performance_monitoring_unit enum (presence tracked by bit 0x4 of bitField0_) ---
public static final int PERFORMANCE_MONITORING_UNIT_FIELD_NUMBER = 3;
private int performanceMonitoringUnit_ = 0;
/**
 *
 *
 * <pre>
 * Type of Performance Monitoring Unit (PMU) requested on node pool instances.
 * If unset, PMU will not be available to the node.
 * </pre>
 *
 * <code>
 * optional .google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit performance_monitoring_unit = 3;
 * </code>
 *
 * @return Whether the performanceMonitoringUnit field is set.
 */
@java.lang.Override
public boolean hasPerformanceMonitoringUnit() {
  return ((bitField0_ & 0x00000004) != 0);
}
/**
 *
 *
 * <pre>
 * Type of Performance Monitoring Unit (PMU) requested on node pool instances.
 * If unset, PMU will not be available to the node.
 * </pre>
 *
 * <code>
 * optional .google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit performance_monitoring_unit = 3;
 * </code>
 *
 * @return The enum numeric value on the wire for performanceMonitoringUnit.
 */
@java.lang.Override
public int getPerformanceMonitoringUnitValue() {
  return performanceMonitoringUnit_;
}
/**
 *
 *
 * <pre>
 * Type of Performance Monitoring Unit (PMU) requested on node pool instances.
 * If unset, PMU will not be available to the node.
 * </pre>
 *
 * <code>
 * optional .google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit performance_monitoring_unit = 3;
 * </code>
 *
 * @return The performanceMonitoringUnit.
 */
@java.lang.Override
public com.google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit
    getPerformanceMonitoringUnit() {
  // Map the raw wire value to the enum constant; unknown numbers (from a newer
  // schema) surface as UNRECOGNIZED rather than null.
  com.google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit result =
      com.google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit.forNumber(
          performanceMonitoringUnit_);
  return result == null
      ? com.google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit.UNRECOGNIZED
      : result;
}
// Memoized result of isInitialized(): -1 = not yet computed, 1 = initialized, 0 = not.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields in this message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Serialize only the fields whose presence bits are set, then any unknown fields.
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeInt64(1, threadsPerCore_);
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeBool(2, enableNestedVirtualization_);
  }
  if (((bitField0_ & 0x00000004) != 0)) {
    output.writeEnum(3, performanceMonitoringUnit_);
  }
  getUnknownFields().writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  // Size is cached in memoizedSize; -1 means not yet computed.
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeInt64Size(1, threadsPerCore_);
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeBoolSize(2, enableNestedVirtualization_);
  }
  if (((bitField0_ & 0x00000004) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, performanceMonitoringUnit_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.container.v1.AdvancedMachineFeatures)) {
    return super.equals(obj);
  }
  com.google.container.v1.AdvancedMachineFeatures other =
      (com.google.container.v1.AdvancedMachineFeatures) obj;

  // For each optional field: presence must match, and when present the values must match.
  if (hasThreadsPerCore() != other.hasThreadsPerCore()) return false;
  if (hasThreadsPerCore()) {
    if (getThreadsPerCore() != other.getThreadsPerCore()) return false;
  }
  if (hasEnableNestedVirtualization() != other.hasEnableNestedVirtualization()) return false;
  if (hasEnableNestedVirtualization()) {
    if (getEnableNestedVirtualization() != other.getEnableNestedVirtualization()) return false;
  }
  if (hasPerformanceMonitoringUnit() != other.hasPerformanceMonitoringUnit()) return false;
  if (hasPerformanceMonitoringUnit()) {
    if (performanceMonitoringUnit_ != other.performanceMonitoringUnit_) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  // Hash is memoized; 0 means not yet computed. Field numbers are mixed in as tags
  // so that equal field values in different fields hash differently.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasThreadsPerCore()) {
    hash = (37 * hash) + THREADS_PER_CORE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getThreadsPerCore());
  }
  if (hasEnableNestedVirtualization()) {
    hash = (37 * hash) + ENABLE_NESTED_VIRTUALIZATION_FIELD_NUMBER;
    hash =
        (53 * hash) + com.google.protobuf.Internal.hashBoolean(getEnableNestedVirtualization());
  }
  if (hasPerformanceMonitoringUnit()) {
    hash = (37 * hash) + PERFORMANCE_MONITORING_UNIT_FIELD_NUMBER;
    hash = (53 * hash) + performanceMonitoringUnit_;
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// --- Generated parse entry points: every overload delegates to PARSER or the
// --- GeneratedMessageV3 IO helpers; they differ only in input source type.
public static com.google.container.v1.AdvancedMachineFeatures parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.container.v1.AdvancedMachineFeatures parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.container.v1.AdvancedMachineFeatures parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.container.v1.AdvancedMachineFeatures parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.container.v1.AdvancedMachineFeatures parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.container.v1.AdvancedMachineFeatures parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.container.v1.AdvancedMachineFeatures parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.container.v1.AdvancedMachineFeatures parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a length prefix first, so several messages can share a stream.
public static com.google.container.v1.AdvancedMachineFeatures parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.container.v1.AdvancedMachineFeatures parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.container.v1.AdvancedMachineFeatures parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.container.v1.AdvancedMachineFeatures parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// --- Builder factories ---
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Returns a builder pre-populated with the given prototype's set fields.
public static Builder newBuilder(com.google.container.v1.AdvancedMachineFeatures prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance shares a fresh empty Builder; any other instance seeds the builder.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Specifies options for controlling advanced machine features.
 * </pre>
 *
 * Protobuf type {@code google.container.v1.AdvancedMachineFeatures}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.container.v1.AdvancedMachineFeatures)
    com.google.container.v1.AdvancedMachineFeaturesOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.container.v1.ClusterServiceProto
        .internal_static_google_container_v1_AdvancedMachineFeatures_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.container.v1.ClusterServiceProto
        .internal_static_google_container_v1_AdvancedMachineFeatures_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.container.v1.AdvancedMachineFeatures.class,
            com.google.container.v1.AdvancedMachineFeatures.Builder.class);
  }

  // Construct using com.google.container.v1.AdvancedMachineFeatures.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  @java.lang.Override
  public Builder clear() {
    // Reset all presence bits and restore every field to its default value.
    super.clear();
    bitField0_ = 0;
    threadsPerCore_ = 0L;
    enableNestedVirtualization_ = false;
    performanceMonitoringUnit_ = 0;
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.container.v1.ClusterServiceProto
        .internal_static_google_container_v1_AdvancedMachineFeatures_descriptor;
  }

  @java.lang.Override
  public com.google.container.v1.AdvancedMachineFeatures getDefaultInstanceForType() {
    return com.google.container.v1.AdvancedMachineFeatures.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.container.v1.AdvancedMachineFeatures build() {
    com.google.container.v1.AdvancedMachineFeatures result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.container.v1.AdvancedMachineFeatures buildPartial() {
    com.google.container.v1.AdvancedMachineFeatures result =
        new com.google.container.v1.AdvancedMachineFeatures(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies only the fields whose presence bits are set and accumulates the
  // corresponding bits into the result's bitField0_.
  private void buildPartial0(com.google.container.v1.AdvancedMachineFeatures result) {
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.threadsPerCore_ = threadsPerCore_;
      to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.enableNestedVirtualization_ = enableNestedVirtualization_;
      to_bitField0_ |= 0x00000002;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.performanceMonitoringUnit_ = performanceMonitoringUnit_;
      to_bitField0_ |= 0x00000004;
    }
    result.bitField0_ |= to_bitField0_;
  }

  // --- Boilerplate delegations to the GeneratedMessageV3.Builder base class ---
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.container.v1.AdvancedMachineFeatures) {
      return mergeFrom((com.google.container.v1.AdvancedMachineFeatures) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Field-wise merge: only fields set on `other` overwrite this builder's values.
  public Builder mergeFrom(com.google.container.v1.AdvancedMachineFeatures other) {
    if (other == com.google.container.v1.AdvancedMachineFeatures.getDefaultInstance())
      return this;
    if (other.hasThreadsPerCore()) {
      setThreadsPerCore(other.getThreadsPerCore());
    }
    if (other.hasEnableNestedVirtualization()) {
      setEnableNestedVirtualization(other.getEnableNestedVirtualization());
    }
    if (other.hasPerformanceMonitoringUnit()) {
      setPerformanceMonitoringUnit(other.getPerformanceMonitoringUnit());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format merge: tag-dispatch loop. Tag 0 ends the message; tags 8/16/24 are
  // the varint/bool/enum wire tags of fields 1..3; anything else is preserved as an
  // unknown field (an end-group tag terminates the loop).
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 8:
            {
              threadsPerCore_ = input.readInt64();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
          case 16:
            {
              enableNestedVirtualization_ = input.readBool();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
          case 24:
            {
              performanceMonitoringUnit_ = input.readEnum();
              bitField0_ |= 0x00000004;
              break;
            } // case 24
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  private long threadsPerCore_;
  /**
   *
   *
   * <pre>
   * The number of threads per physical core. To disable simultaneous
   * multithreading (SMT) set this to 1. If unset, the maximum number of threads
   * supported per core by the underlying processor is assumed.
   * </pre>
   *
   * <code>optional int64 threads_per_core = 1;</code>
   *
   * @return Whether the threadsPerCore field is set.
   */
  @java.lang.Override
  public boolean hasThreadsPerCore() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * The number of threads per physical core. To disable simultaneous
   * multithreading (SMT) set this to 1. If unset, the maximum number of threads
   * supported per core by the underlying processor is assumed.
   * </pre>
   *
   * <code>optional int64 threads_per_core = 1;</code>
   *
   * @return The threadsPerCore.
   */
  @java.lang.Override
  public long getThreadsPerCore() {
    return threadsPerCore_;
  }
  /**
   *
   *
   * <pre>
   * The number of threads per physical core. To disable simultaneous
   * multithreading (SMT) set this to 1. If unset, the maximum number of threads
   * supported per core by the underlying processor is assumed.
   * </pre>
   *
   * <code>optional int64 threads_per_core = 1;</code>
   *
   * @param value The threadsPerCore to set.
   * @return This builder for chaining.
   */
  public Builder setThreadsPerCore(long value) {
    threadsPerCore_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * The number of threads per physical core. To disable simultaneous
   * multithreading (SMT) set this to 1. If unset, the maximum number of threads
   * supported per core by the underlying processor is assumed.
   * </pre>
   *
   * <code>optional int64 threads_per_core = 1;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearThreadsPerCore() {
    bitField0_ = (bitField0_ & ~0x00000001);
    threadsPerCore_ = 0L;
    onChanged();
    return this;
  }

  private boolean enableNestedVirtualization_;
  /**
   *
   *
   * <pre>
   * Whether or not to enable nested virtualization (defaults to false).
   * </pre>
   *
   * <code>optional bool enable_nested_virtualization = 2;</code>
   *
   * @return Whether the enableNestedVirtualization field is set.
   */
  @java.lang.Override
  public boolean hasEnableNestedVirtualization() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Whether or not to enable nested virtualization (defaults to false).
   * </pre>
   *
   * <code>optional bool enable_nested_virtualization = 2;</code>
   *
   * @return The enableNestedVirtualization.
   */
  @java.lang.Override
  public boolean getEnableNestedVirtualization() {
    return enableNestedVirtualization_;
  }
  /**
   *
   *
   * <pre>
   * Whether or not to enable nested virtualization (defaults to false).
   * </pre>
   *
   * <code>optional bool enable_nested_virtualization = 2;</code>
   *
   * @param value The enableNestedVirtualization to set.
   * @return This builder for chaining.
   */
  public Builder setEnableNestedVirtualization(boolean value) {
    enableNestedVirtualization_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Whether or not to enable nested virtualization (defaults to false).
   * </pre>
   *
   * <code>optional bool enable_nested_virtualization = 2;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearEnableNestedVirtualization() {
    bitField0_ = (bitField0_ & ~0x00000002);
    enableNestedVirtualization_ = false;
    onChanged();
    return this;
  }

  private int performanceMonitoringUnit_ = 0;
  /**
   *
   *
   * <pre>
   * Type of Performance Monitoring Unit (PMU) requested on node pool instances.
   * If unset, PMU will not be available to the node.
   * </pre>
   *
   * <code>
   * optional .google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit performance_monitoring_unit = 3;
   * </code>
   *
   * @return Whether the performanceMonitoringUnit field is set.
   */
  @java.lang.Override
  public boolean hasPerformanceMonitoringUnit() {
    return ((bitField0_ & 0x00000004) != 0);
  }
  /**
   *
   *
   * <pre>
   * Type of Performance Monitoring Unit (PMU) requested on node pool instances.
   * If unset, PMU will not be available to the node.
   * </pre>
   *
   * <code>
   * optional .google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit performance_monitoring_unit = 3;
   * </code>
   *
   * @return The enum numeric value on the wire for performanceMonitoringUnit.
   */
  @java.lang.Override
  public int getPerformanceMonitoringUnitValue() {
    return performanceMonitoringUnit_;
  }
  /**
   *
   *
   * <pre>
   * Type of Performance Monitoring Unit (PMU) requested on node pool instances.
   * If unset, PMU will not be available to the node.
   * </pre>
   *
   * <code>
   * optional .google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit performance_monitoring_unit = 3;
   * </code>
   *
   * @param value The enum numeric value on the wire for performanceMonitoringUnit to set.
   * @return This builder for chaining.
   */
  public Builder setPerformanceMonitoringUnitValue(int value) {
    performanceMonitoringUnit_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Type of Performance Monitoring Unit (PMU) requested on node pool instances.
   * If unset, PMU will not be available to the node.
   * </pre>
   *
   * <code>
   * optional .google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit performance_monitoring_unit = 3;
   * </code>
   *
   * @return The performanceMonitoringUnit.
   */
  @java.lang.Override
  public com.google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit
      getPerformanceMonitoringUnit() {
    // Unknown wire values surface as UNRECOGNIZED rather than null.
    com.google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit result =
        com.google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit.forNumber(
            performanceMonitoringUnit_);
    return result == null
        ? com.google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit.UNRECOGNIZED
        : result;
  }
  /**
   *
   *
   * <pre>
   * Type of Performance Monitoring Unit (PMU) requested on node pool instances.
   * If unset, PMU will not be available to the node.
   * </pre>
   *
   * <code>
   * optional .google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit performance_monitoring_unit = 3;
   * </code>
   *
   * @param value The performanceMonitoringUnit to set.
   * @return This builder for chaining.
   */
  public Builder setPerformanceMonitoringUnit(
      com.google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000004;
    performanceMonitoringUnit_ = value.getNumber();
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Type of Performance Monitoring Unit (PMU) requested on node pool instances.
   * If unset, PMU will not be available to the node.
   * </pre>
   *
   * <code>
   * optional .google.container.v1.AdvancedMachineFeatures.PerformanceMonitoringUnit performance_monitoring_unit = 3;
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearPerformanceMonitoringUnit() {
    bitField0_ = (bitField0_ & ~0x00000004);
    performanceMonitoringUnit_ = 0;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.container.v1.AdvancedMachineFeatures)
}
// @@protoc_insertion_point(class_scope:google.container.v1.AdvancedMachineFeatures)
// Shared singleton default instance; also used as the identity value in merges.
private static final com.google.container.v1.AdvancedMachineFeatures DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.container.v1.AdvancedMachineFeatures();
}

public static com.google.container.v1.AdvancedMachineFeatures getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser that delegates to Builder.mergeFrom; on failure the partially built
// message is attached to the exception as the "unfinished message".
private static final com.google.protobuf.Parser<AdvancedMachineFeatures> PARSER =
    new com.google.protobuf.AbstractParser<AdvancedMachineFeatures>() {
      @java.lang.Override
      public AdvancedMachineFeatures parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<AdvancedMachineFeatures> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<AdvancedMachineFeatures> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.container.v1.AdvancedMachineFeatures getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/flink | 36,460 | flink-runtime/src/test/java/org/apache/flink/runtime/clusterframework/TaskExecutorProcessUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.clusterframework;
import org.apache.flink.api.common.resources.CPUResource;
import org.apache.flink.api.common.resources.ExternalResource;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ConfigurationUtils;
import org.apache.flink.configuration.ExternalResourceOptions;
import org.apache.flink.configuration.IllegalConfigurationException;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.runtime.resourcemanager.WorkerResourceSpec;
import org.apache.flink.runtime.util.config.memory.ProcessMemoryUtilsTestBase;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Map;
import java.util.function.Consumer;
import static org.apache.flink.configuration.ConfigurationUtils.getLongConfigOption;
import static org.apache.flink.runtime.clusterframework.TaskExecutorProcessUtils.TM_PROCESS_MEMORY_OPTIONS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
/** Tests for {@link TaskExecutorProcessUtils}. */
class TaskExecutorProcessUtilsTest extends ProcessMemoryUtilsTestBase<TaskExecutorProcessSpec> {
// Memory fixtures shared by the configuration-derivation tests.
private static final MemorySize TASK_HEAP_SIZE = MemorySize.parse("100m");
private static final MemorySize MANAGED_MEM_SIZE = MemorySize.parse("200m");
private static final MemorySize TOTAL_FLINK_MEM_SIZE = MemorySize.parse("1280m");
private static final MemorySize TOTAL_PROCESS_MEM_SIZE = MemorySize.parse("1536m");

// Two distinct external resources so list/amount serialization can be checked per name.
private static final String EXTERNAL_RESOURCE_NAME_1 = "gpu";
private static final String EXTERNAL_RESOURCE_NAME_2 = "custom";

// Fully specified process spec with every memory pool set to a distinct value
// (1m..8m) so each dynamic config entry can be traced back to its source field.
private static final TaskExecutorProcessSpec TM_RESOURCE_SPEC =
    new TaskExecutorProcessSpec(
        new CPUResource(1.0),
        MemorySize.parse("1m"),
        MemorySize.parse("2m"),
        MemorySize.parse("3m"),
        MemorySize.parse("4m"),
        MemorySize.parse("5m"),
        MemorySize.parse("6m"),
        MemorySize.parse("7m"),
        MemorySize.parse("8m"),
        Arrays.asList(
            new ExternalResource(EXTERNAL_RESOURCE_NAME_1, 1),
            new ExternalResource(EXTERNAL_RESOURCE_NAME_2, 2)));

public TaskExecutorProcessUtilsTest() {
  // Run the shared ProcessMemoryUtils test base against the TM memory options.
  super(TM_PROCESS_MEMORY_OPTIONS, TaskManagerOptions.TOTAL_PROCESS_MEMORY);
}
@Test
void testGenerateDynamicConfigurations() {
  // Serialize the spec to the dynamic "-D" config string, parse it back, and
  // verify every resource field round-trips to its originating value.
  String dynamicConfigsStr =
      TaskExecutorProcessUtils.generateDynamicConfigsStr(TM_RESOURCE_SPEC);
  Map<String, String> configs =
      ConfigurationUtils.parseTmResourceDynamicConfigs(dynamicConfigsStr);

  assertThat(
          new CPUResource(
              Double.parseDouble(
                  configs.get(TaskManagerOptions.CPU_CORES.key()))))
      .isEqualTo(TM_RESOURCE_SPEC.getCpuCores());
  assertThat(MemorySize.parse(configs.get(TaskManagerOptions.FRAMEWORK_HEAP_MEMORY.key())))
      .isEqualTo(TM_RESOURCE_SPEC.getFrameworkHeapSize());
  assertThat(
          MemorySize.parse(
              configs.get(TaskManagerOptions.FRAMEWORK_OFF_HEAP_MEMORY.key())))
      .isEqualTo(TM_RESOURCE_SPEC.getFrameworkOffHeapMemorySize());
  assertThat(MemorySize.parse(configs.get(TaskManagerOptions.TASK_HEAP_MEMORY.key())))
      .isEqualTo(TM_RESOURCE_SPEC.getTaskHeapSize());
  assertThat(MemorySize.parse(configs.get(TaskManagerOptions.TASK_OFF_HEAP_MEMORY.key())))
      .isEqualTo(TM_RESOURCE_SPEC.getTaskOffHeapSize());
  // Network min and max are both pinned to the single derived network size.
  assertThat(MemorySize.parse(configs.get(TaskManagerOptions.NETWORK_MEMORY_MAX.key())))
      .isEqualTo(TM_RESOURCE_SPEC.getNetworkMemSize());
  assertThat(MemorySize.parse(configs.get(TaskManagerOptions.NETWORK_MEMORY_MIN.key())))
      .isEqualTo(TM_RESOURCE_SPEC.getNetworkMemSize());
  assertThat(MemorySize.parse(configs.get(TaskManagerOptions.MANAGED_MEMORY_SIZE.key())))
      .isEqualTo(TM_RESOURCE_SPEC.getManagedMemorySize());
  assertThat(MemorySize.parse(configs.get(TaskManagerOptions.JVM_METASPACE.key())))
      .isEqualTo(TM_RESOURCE_SPEC.getJvmMetaspaceAndOverhead().getMetaspace());
  // JVM overhead min and max likewise collapse to one overhead value.
  assertThat(MemorySize.parse(configs.get(TaskManagerOptions.JVM_OVERHEAD_MIN.key())))
      .isEqualTo(TM_RESOURCE_SPEC.getJvmMetaspaceAndOverhead().getOverhead());
  assertThat(MemorySize.parse(configs.get(TaskManagerOptions.JVM_OVERHEAD_MAX.key())))
      .isEqualTo(TM_RESOURCE_SPEC.getJvmMetaspaceAndOverhead().getOverhead());
  assertThat(Integer.valueOf(configs.get(TaskManagerOptions.NUM_TASK_SLOTS.key())))
      .isEqualTo(TM_RESOURCE_SPEC.getNumSlots());
  // External resource names are emitted as a quoted, semicolon-joined list...
  assertThat(configs.get(ExternalResourceOptions.EXTERNAL_RESOURCE_LIST.key()))
      .isEqualTo(
          '"'
              + String.join(";", TM_RESOURCE_SPEC.getExtendedResources().keySet())
              + '"');
  // ...and each resource's amount is written under its per-name amount option.
  assertThat(
          configs.get(
              ExternalResourceOptions.getAmountConfigOptionForResource(
                  EXTERNAL_RESOURCE_NAME_1)))
      .isEqualTo(
          String.valueOf(
              TM_RESOURCE_SPEC
                  .getExtendedResources()
                  .get(EXTERNAL_RESOURCE_NAME_1)
                  .getValue()
                  .longValue()));
  assertThat(
          configs.get(
              ExternalResourceOptions.getAmountConfigOptionForResource(
                  EXTERNAL_RESOURCE_NAME_2)))
      .isEqualTo(
          String.valueOf(
              TM_RESOURCE_SPEC
                  .getExtendedResources()
                  .get(EXTERNAL_RESOURCE_NAME_2)
                  .getValue()
                  .longValue()));
}
@Test
void testProcessSpecFromWorkerResourceSpec() {
  // A worker resource spec converted to a process spec must carry over every
  // resource dimension unchanged (CPU, memory pools, slots, extended resources).
  final WorkerResourceSpec workerSpec =
      new WorkerResourceSpec.Builder()
          .setCpuCores(1.0)
          .setTaskHeapMemoryMB(100)
          .setTaskOffHeapMemoryMB(200)
          .setNetworkMemoryMB(300)
          .setManagedMemoryMB(400)
          .setNumSlots(5)
          .setExtendedResource(new ExternalResource(EXTERNAL_RESOURCE_NAME_1, 1))
          .build();

  final TaskExecutorProcessSpec processSpec =
      TaskExecutorProcessUtils.processSpecFromWorkerResourceSpec(
          new Configuration(), workerSpec);

  assertThat(processSpec.getCpuCores()).isEqualTo(workerSpec.getCpuCores());
  assertThat(processSpec.getTaskHeapSize()).isEqualTo(workerSpec.getTaskHeapSize());
  assertThat(processSpec.getTaskOffHeapSize()).isEqualTo(workerSpec.getTaskOffHeapSize());
  assertThat(processSpec.getNetworkMemSize()).isEqualTo(workerSpec.getNetworkMemSize());
  assertThat(processSpec.getManagedMemorySize()).isEqualTo(workerSpec.getManagedMemSize());
  assertThat(processSpec.getNumSlots()).isEqualTo(workerSpec.getNumSlots());
  assertThat(processSpec.getExtendedResources())
      .isEqualTo(workerSpec.getExtendedResources());
}
@Test
void testConfigCpuCores() {
  // An explicitly configured CPU count must be taken verbatim in every setup.
  final double configuredCores = 1.0;

  final Configuration config = new Configuration();
  config.set(TaskManagerOptions.CPU_CORES, configuredCores);

  validateInAllConfigurations(
      config,
      spec -> assertThat(spec.getCpuCores()).isEqualTo(new CPUResource(configuredCores)));
}
@Test
void testConfigNoCpuCores() {
  // Without an explicit CPU setting, the core count falls back to the slot count.
  final Configuration config = new Configuration();
  config.set(TaskManagerOptions.NUM_TASK_SLOTS, 3);

  validateInAllConfigurations(
      config,
      spec -> assertThat(spec.getCpuCores()).isEqualTo(new CPUResource(3.0)));
}
@Test
void testConfigNegativeCpuCores() {
  // A negative CPU count is invalid and must be rejected in every setup.
  final Configuration config = new Configuration();
  config.set(TaskManagerOptions.CPU_CORES, -0.1d);
  validateFailInAllConfigurations(config);
}
@Test
void testConfigFrameworkHeapMemory() {
  // A configured framework heap size must appear unchanged in the derived spec.
  final MemorySize configuredFrameworkHeap = MemorySize.parse("100m");

  final Configuration config = new Configuration();
  config.set(TaskManagerOptions.FRAMEWORK_HEAP_MEMORY, configuredFrameworkHeap);

  validateInAllConfigurations(
      config,
      spec -> assertThat(spec.getFrameworkHeapSize()).isEqualTo(configuredFrameworkHeap));
}
@Test
void testConfigFrameworkOffHeapMemory() {
  // A configured framework off-heap size must appear unchanged in the derived spec.
  final MemorySize configuredFrameworkOffHeap = MemorySize.parse("10m");

  final Configuration config = new Configuration();
  config.set(TaskManagerOptions.FRAMEWORK_OFF_HEAP_MEMORY, configuredFrameworkOffHeap);

  validateInAllConfigurations(
      config,
      spec ->
          assertThat(spec.getFrameworkOffHeapMemorySize())
              .isEqualTo(configuredFrameworkOffHeap));
}
@Test
void testConfigTaskHeapMemory() {
  // A configured task heap size must appear unchanged in the derived spec.
  final MemorySize configuredTaskHeap = MemorySize.parse("50m");

  final Configuration config = new Configuration();
  config.set(TaskManagerOptions.TASK_HEAP_MEMORY, configuredTaskHeap);

  // validate in configurations without explicit task heap memory size,
  // to avoid checking against overwritten task heap memory size
  validateInConfigurationsWithoutExplicitTaskHeapMem(
      config,
      spec -> assertThat(spec.getTaskHeapSize()).isEqualTo(configuredTaskHeap));
}
@Test
void testConfigTaskOffheapMemory() {
  // A configured task off-heap size must appear unchanged in the derived spec.
  final MemorySize configuredTaskOffHeap = MemorySize.parse("50m");

  final Configuration config = new Configuration();
  config.set(TaskManagerOptions.TASK_OFF_HEAP_MEMORY, configuredTaskOffHeap);

  validateInAllConfigurations(
      config,
      spec -> assertThat(spec.getTaskOffHeapSize()).isEqualTo(configuredTaskOffHeap));
}
@Test
void testConfigNetworkMemoryRange() {
  // With an explicit [min, max] network range, the derived network memory must
  // always land inside that range, whatever the rest of the configuration.
  final MemorySize lowerBound = MemorySize.parse("200m");
  final MemorySize upperBound = MemorySize.parse("500m");

  final Configuration config = new Configuration();
  config.set(TaskManagerOptions.NETWORK_MEMORY_MIN, lowerBound);
  config.set(TaskManagerOptions.NETWORK_MEMORY_MAX, upperBound);

  validateInAllConfigurations(
      config,
      spec -> {
        final long networkBytes = spec.getNetworkMemSize().getBytes();
        assertThat(networkBytes).isGreaterThanOrEqualTo(lowerBound.getBytes());
        assertThat(networkBytes).isLessThanOrEqualTo(upperBound.getBytes());
      });
}
@Test
void testConsistencyCheckOfDerivedNetworkMemoryWithinMinMaxRangeNotMatchingFractionPasses() {
    final Configuration configuration =
            setupConfigWithFlinkAndTaskHeapToDeriveGivenNetworkMem(400);
    // An extremely small fraction cannot match the derived network memory, yet the
    // internal consistency check must still accept a value inside [min, max].
    configuration.set(TaskManagerOptions.NETWORK_MEMORY_FRACTION, 0.001f);
    // Passing means: deriving the spec does not throw.
    TaskExecutorProcessUtils.processSpecFromConfig(configuration);
}
@Test
void testConsistencyCheckOfDerivedNetworkMemoryLessThanMinFails() {
    // Dropped the redundant `public` modifier for consistency with the other
    // package-private JUnit 5 tests in this class.
    // Force the derived network memory (500m) below the explicit minimum; the
    // internal consistency check must reject the configuration.
    final Configuration configuration =
            setupConfigWithFlinkAndTaskHeapToDeriveGivenNetworkMem(500);
    configuration.set(TaskManagerOptions.NETWORK_MEMORY_MIN, MemorySize.parse("900m"));
    configuration.set(TaskManagerOptions.NETWORK_MEMORY_MAX, MemorySize.parse("1000m"));
    assertThatExceptionOfType(IllegalConfigurationException.class)
            .isThrownBy(() -> TaskExecutorProcessUtils.processSpecFromConfig(configuration));
}
@Test
void testConsistencyCheckOfDerivedNetworkMemoryGreaterThanMaxFails() {
    // Dropped the redundant `public` modifier for consistency with the other
    // package-private JUnit 5 tests in this class.
    // Force the derived network memory (500m) above the explicit maximum; the
    // internal consistency check must reject the configuration.
    final Configuration configuration =
            setupConfigWithFlinkAndTaskHeapToDeriveGivenNetworkMem(500);
    configuration.set(TaskManagerOptions.NETWORK_MEMORY_MIN, MemorySize.parse("100m"));
    configuration.set(TaskManagerOptions.NETWORK_MEMORY_MAX, MemorySize.parse("150m"));
    assertThatExceptionOfType(IllegalConfigurationException.class)
            .isThrownBy(() -> TaskExecutorProcessUtils.processSpecFromConfig(configuration));
}
/**
 * Builds a configuration with explicit total Flink, task heap and managed memory that derives
 * exactly {@code networkMemorySizeToDeriveMb} of network memory (asserted before returning).
 *
 * <p>Starting from the class-level memory constants, the derived network memory is steered to
 * the requested value by either growing the total Flink memory (surplus flows into network
 * memory) or growing the task heap (which shrinks the network remainder).
 *
 * @param networkMemorySizeToDeriveMb network memory, in MiB, the returned configuration must
 *     derive
 * @return a configuration deriving exactly the requested network memory size
 */
private static Configuration setupConfigWithFlinkAndTaskHeapToDeriveGivenNetworkMem(
        final int networkMemorySizeToDeriveMb) {
    final Configuration conf = new Configuration();
    conf.set(TaskManagerOptions.TOTAL_FLINK_MEMORY, TOTAL_FLINK_MEM_SIZE);
    conf.set(TaskManagerOptions.TASK_HEAP_MEMORY, TASK_HEAP_SIZE);
    conf.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, MANAGED_MEM_SIZE);
    final TaskExecutorProcessSpec taskExecutorProcessSpec =
            TaskExecutorProcessUtils.processSpecFromConfig(conf);
    final int derivedNetworkMemorySizeMb =
            taskExecutorProcessSpec.getNetworkMemSize().getMebiBytes();
    if (derivedNetworkMemorySizeMb < networkMemorySizeToDeriveMb) {
        // Grow total Flink memory by the missing amount; the surplus becomes network memory.
        final int adjustedTotalFlinkMemoryMb =
                taskExecutorProcessSpec.getTotalFlinkMemorySize().getMebiBytes()
                        - derivedNetworkMemorySizeMb
                        + networkMemorySizeToDeriveMb;
        conf.set(
                TaskManagerOptions.TOTAL_FLINK_MEMORY,
                MemorySize.ofMebiBytes(adjustedTotalFlinkMemoryMb));
    } else if (derivedNetworkMemorySizeMb > networkMemorySizeToDeriveMb) {
        // Shift the excess from network memory into the task heap.
        final int adjustedTaskHeapMemoryMb =
                taskExecutorProcessSpec.getTaskHeapSize().getMebiBytes()
                        + derivedNetworkMemorySizeMb
                        - networkMemorySizeToDeriveMb;
        conf.set(
                TaskManagerOptions.TASK_HEAP_MEMORY,
                MemorySize.ofMebiBytes(adjustedTaskHeapMemoryMb));
    }
    // Fixed local-variable typo: "adjusteed..." -> "adjusted...".
    final TaskExecutorProcessSpec adjustedTaskExecutorProcessSpec =
            TaskExecutorProcessUtils.processSpecFromConfig(conf);
    assertThat(adjustedTaskExecutorProcessSpec.getNetworkMemSize().getMebiBytes())
            .isEqualTo(networkMemorySizeToDeriveMb);
    return conf;
}
@Test
void testConfigNetworkMemoryRangeFailure() {
    // min (200m) > max (50m) is an inconsistent range and must be rejected everywhere.
    final Configuration config = new Configuration();
    config.set(TaskManagerOptions.NETWORK_MEMORY_MAX, MemorySize.parse("50m"));
    config.set(TaskManagerOptions.NETWORK_MEMORY_MIN, MemorySize.parse("200m"));
    validateFailInAllConfigurations(config);
}
@Test
void testConfigNetworkMemoryFraction() {
    // Make the [min, max] range wide enough that the fraction is never clamped.
    final float fraction = 0.2f;
    final Configuration config = new Configuration();
    config.set(TaskManagerOptions.NETWORK_MEMORY_MAX, MemorySize.parse("1t"));
    config.set(TaskManagerOptions.NETWORK_MEMORY_MIN, MemorySize.ZERO);
    config.set(TaskManagerOptions.NETWORK_MEMORY_FRACTION, fraction);
    // Only validate without an explicit total flink/process memory; otherwise the
    // configured fraction could conflict with "total minus all other components".
    validateInConfigWithExplicitTaskHeapAndManagedMem(
            config,
            spec ->
                    assertThat(spec.getNetworkMemSize())
                            .isEqualTo(spec.getTotalFlinkMemorySize().multiply(fraction)));
}
@Test
void testConfigNetworkMemoryFractionFailure() {
    // Both ends of the invalid range: a negative fraction and a full 1.0 are rejected.
    final Configuration config = new Configuration();
    config.set(TaskManagerOptions.NETWORK_MEMORY_FRACTION, -0.1f);
    validateFailInAllConfigurations(config);
    config.set(TaskManagerOptions.NETWORK_MEMORY_FRACTION, 1.0f);
    validateFailInAllConfigurations(config);
}
@Test
void testConfigManagedMemorySize() {
    // Validate only in base configurations that do not pin managed memory
    // themselves, so the custom value cannot be overwritten.
    final MemorySize expectedSize = MemorySize.parse("100m");
    final Configuration config = new Configuration();
    config.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, expectedSize);
    validateInConfigurationsWithoutExplicitManagedMem(
            config, spec -> assertThat(spec.getManagedMemorySize()).isEqualTo(expectedSize));
}
@Test
void testConfigManagedMemoryLegacySize() {
    final MemorySize managedMemSize = MemorySize.parse("100m");
    // NOTE(review): the "legacy" option resolves to MANAGED_MEMORY_SIZE itself, which makes
    // this test look identical to testConfigManagedMemorySize. Presumably the deprecated
    // alias keys registered on that option are what is exercised here -- confirm against
    // TaskManagerOptions before removing the apparent duplication.
    @SuppressWarnings("deprecation")
    final ConfigOption<MemorySize> legacyOption = TaskManagerOptions.MANAGED_MEMORY_SIZE;
    Configuration conf = new Configuration();
    conf.set(legacyOption, managedMemSize);
    // validate in configurations without explicit managed memory size,
    // to avoid checking against overwritten managed memory size
    validateInConfigurationsWithoutExplicitManagedMem(
            conf,
            taskExecutorProcessSpec ->
                    assertThat(taskExecutorProcessSpec.getManagedMemorySize())
                            .isEqualTo(managedMemSize));
}
@Test
void testConfigManagedMemoryFraction() {
    final float fraction = 0.5f;
    final Configuration config = new Configuration();
    config.set(TaskManagerOptions.MANAGED_MEMORY_FRACTION, fraction);
    // The fraction only takes effect when no explicit managed memory size is configured.
    validateInConfigurationsWithoutExplicitManagedMem(
            config,
            spec ->
                    assertThat(spec.getManagedMemorySize())
                            .isEqualTo(spec.getTotalFlinkMemorySize().multiply(fraction)));
}
@Test
void testConfigManagedMemoryFractionFailure() {
    // A negative fraction and a full 1.0 are both outside the valid [0, 1) range.
    final Configuration config = new Configuration();
    config.set(TaskManagerOptions.MANAGED_MEMORY_FRACTION, -0.1f);
    validateFailInConfigurationsWithoutExplicitManagedMem(config);
    config.set(TaskManagerOptions.MANAGED_MEMORY_FRACTION, 1.0f);
    validateFailInConfigurationsWithoutExplicitManagedMem(config);
}
@Test
void testFlinkInternalMemorySizeAddUpFailure() {
    // Five explicit 100m components add up to 500m, exceeding the declared 499m
    // of total Flink memory -- the configuration must be rejected.
    final MemorySize componentSize = MemorySize.parse("100m");
    final Configuration config = new Configuration();
    config.set(TaskManagerOptions.TOTAL_FLINK_MEMORY, MemorySize.parse("499m"));
    config.set(TaskManagerOptions.FRAMEWORK_HEAP_MEMORY, componentSize);
    config.set(TaskManagerOptions.TASK_HEAP_MEMORY, componentSize);
    config.set(TaskManagerOptions.TASK_OFF_HEAP_MEMORY, componentSize);
    config.set(TaskManagerOptions.NETWORK_MEMORY_MIN, componentSize);
    config.set(TaskManagerOptions.NETWORK_MEMORY_MAX, componentSize);
    config.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, componentSize);
    validateFail(config);
}
@Test
void testFlinkInternalMemoryFractionAddUpFailure() {
    // 0.6 + 0.6 of the total cannot fit into one total Flink memory budget.
    final Configuration config = new Configuration();
    config.set(TaskManagerOptions.NETWORK_MEMORY_FRACTION, 0.6f);
    config.set(TaskManagerOptions.MANAGED_MEMORY_FRACTION, 0.6f);
    // With an explicitly configured managed memory size the managed fraction is
    // ignored, so only the total-size based configurations are expected to fail.
    validateFailInConfigWithExplicitTotalFlinkMem(config);
    validateFailInConfigWithExplicitTotalProcessMem(config);
}
@Test
void testConfigTotalProcessMemoryLegacySize() {
    final MemorySize totalProcessMemorySize = MemorySize.parse("2g");
    // NOTE(review): the "legacy" option resolves to TOTAL_PROCESS_MEMORY itself; presumably
    // the deprecated alias keys registered on that option are what is meant here -- confirm
    // against TaskManagerOptions.
    @SuppressWarnings("deprecation")
    final ConfigOption<MemorySize> legacyOption = TaskManagerOptions.TOTAL_PROCESS_MEMORY;
    Configuration conf = new Configuration();
    conf.set(legacyOption, totalProcessMemorySize);
    TaskExecutorProcessSpec taskExecutorProcessSpec =
            TaskExecutorProcessUtils.processSpecFromConfig(conf);
    assertThat(taskExecutorProcessSpec.getTotalProcessMemorySize())
            .isEqualTo(totalProcessMemorySize);
}
@Test
void testExceptionShouldContainRequiredConfigOptions() {
    // Dropped the redundant `public` modifier for consistency with the other
    // package-private JUnit 5 tests in this class.
    // With no memory option configured at all, the error message must name every
    // option the user could set to resolve the situation.
    assertThatThrownBy(
                    () -> TaskExecutorProcessUtils.processSpecFromConfig(new Configuration()))
            .isInstanceOf(IllegalConfigurationException.class)
            .hasMessageContaining(TaskManagerOptions.TASK_HEAP_MEMORY.key())
            .hasMessageContaining(TaskManagerOptions.MANAGED_MEMORY_SIZE.key())
            .hasMessageContaining(TaskManagerOptions.TOTAL_FLINK_MEMORY.key())
            .hasMessageContaining(TaskManagerOptions.TOTAL_PROCESS_MEMORY.key());
}
@Test
void testConfigNumSlots() {
    // The configured slot count is copied into the spec unchanged in every variant.
    final int expectedSlots = 5;
    final Configuration config = new Configuration();
    config.set(TaskManagerOptions.NUM_TASK_SLOTS, expectedSlots);
    validateInAllConfigurations(
            config, spec -> assertThat(spec.getNumSlots()).isEqualTo(expectedSlots));
}
@Test
void testProcessSpecFromConfigWithExternalResource() {
    // Declare a single external resource with an amount of 1 and check that it
    // surfaces as exactly one extended resource in the derived spec.
    final Configuration config = new Configuration();
    config.setString(
            ExternalResourceOptions.EXTERNAL_RESOURCE_LIST.key(), EXTERNAL_RESOURCE_NAME_1);
    config.set(
            getLongConfigOption(
                    ExternalResourceOptions.getAmountConfigOptionForResource(
                            EXTERNAL_RESOURCE_NAME_1)),
            1L);
    config.set(TaskManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.ofMebiBytes(4096));
    final TaskExecutorProcessSpec spec =
            TaskExecutorProcessUtils.processSpecFromConfig(config);
    assertThat(spec.getExtendedResources()).hasSize(1);
    assertThat(
                    spec.getExtendedResources()
                            .get(EXTERNAL_RESOURCE_NAME_1)
                            .getValue()
                            .longValue())
            .isOne();
}
// Runs the validation function against every supported way of specifying the memory budget.
@Override
protected void validateInAllConfigurations(
        final Configuration customConfig, Consumer<TaskExecutorProcessSpec> validateFunc) {
    validateInConfigWithExplicitTaskHeapAndManagedMem(customConfig, validateFunc);
    validateInConfigWithExplicitTotalFlinkMem(customConfig, validateFunc);
    validateInConfigWithExplicitTotalFlinkAndTaskHeapMem(customConfig, validateFunc);
    validateInConfigWithExplicitTotalFlinkAndManagedMem(customConfig, validateFunc);
    validateInConfigWithExplicitTotalProcessMem(customConfig, validateFunc);
}
// Expects spec derivation to fail for every supported base configuration.
@Override
protected void validateFailInAllConfigurations(final Configuration customConfig) {
    validateFailInConfigWithExplicitTaskHeapAndManagedMem(customConfig);
    validateFailInConfigWithExplicitTotalFlinkMem(customConfig);
    validateFailInConfigWithExplicitTotalFlinkAndTaskHeapMem(customConfig);
    validateFailInConfigWithExplicitTotalFlinkAndManagedMem(customConfig);
    validateFailInConfigWithExplicitTotalProcessMem(customConfig);
}
// Like validateInAllConfigurations, but skips base configurations that pin the task heap,
// so a task-heap value in customConfig is never shadowed.
private void validateInConfigurationsWithoutExplicitTaskHeapMem(
        final Configuration customConfig, Consumer<TaskExecutorProcessSpec> validateFunc) {
    validateInConfigWithExplicitTotalFlinkMem(customConfig, validateFunc);
    validateInConfigWithExplicitTotalFlinkAndManagedMem(customConfig, validateFunc);
    validateInConfigWithExplicitTotalProcessMem(customConfig, validateFunc);
}
// Like validateInAllConfigurations, but skips base configurations that pin managed memory.
private void validateInConfigurationsWithoutExplicitManagedMem(
        final Configuration customConfig, Consumer<TaskExecutorProcessSpec> validateFunc) {
    validateInConfigWithExplicitTotalFlinkMem(customConfig, validateFunc);
    validateInConfigWithExplicitTotalFlinkAndTaskHeapMem(customConfig, validateFunc);
    validateInConfigWithExplicitTotalProcessMem(customConfig, validateFunc);
}
// Failure counterpart of validateInConfigurationsWithoutExplicitManagedMem.
private void validateFailInConfigurationsWithoutExplicitManagedMem(
        final Configuration customConfig) {
    validateFailInConfigWithExplicitTotalFlinkMem(customConfig);
    validateFailInConfigWithExplicitTotalFlinkAndTaskHeapMem(customConfig);
    validateFailInConfigWithExplicitTotalProcessMem(customConfig);
}
// Validates against a base configuration that pins task heap and managed memory explicitly.
private void validateInConfigWithExplicitTaskHeapAndManagedMem(
        final Configuration customConfig, Consumer<TaskExecutorProcessSpec> validateFunc) {
    log.info("Validating in configuration with explicit task heap and managed memory size.");
    final Configuration config = configWithExplicitTaskHeapAndManageMem();
    config.addAll(customConfig);
    final TaskExecutorProcessSpec spec =
            TaskExecutorProcessUtils.processSpecFromConfig(config);
    // The base configuration's explicit sizes must survive the merge with customConfig.
    assertThat(spec.getTaskHeapSize()).isEqualTo(TASK_HEAP_SIZE);
    assertThat(spec.getManagedMemorySize()).isEqualTo(MANAGED_MEM_SIZE);
    validateFunc.accept(spec);
}
// Failure counterpart: the merged configuration must be rejected.
private void validateFailInConfigWithExplicitTaskHeapAndManagedMem(
        final Configuration customConfig) {
    log.info(
            "Validating failing in configuration with explicit task heap and managed memory size.");
    final Configuration config = configWithExplicitTaskHeapAndManageMem();
    config.addAll(customConfig);
    validateFail(config);
}
// Validates against a base configuration that only pins the total Flink memory size.
private void validateInConfigWithExplicitTotalFlinkMem(
        final Configuration customConfig, Consumer<TaskExecutorProcessSpec> validateFunc) {
    log.info("Validating in configuration with explicit total flink memory size.");
    final Configuration config = configWithExplicitTotalFlinkMem();
    config.addAll(customConfig);
    final TaskExecutorProcessSpec spec =
            TaskExecutorProcessUtils.processSpecFromConfig(config);
    assertThat(spec.getTotalFlinkMemorySize()).isEqualTo(TOTAL_FLINK_MEM_SIZE);
    validateFunc.accept(spec);
}
// Failure counterpart: the merged configuration must be rejected.
private void validateFailInConfigWithExplicitTotalFlinkMem(final Configuration customConfig) {
    log.info("Validating failing in configuration with explicit total flink memory size.");
    final Configuration config = configWithExplicitTotalFlinkMem();
    config.addAll(customConfig);
    validateFail(config);
}
// Validates against a base configuration pinning total Flink memory and the task heap.
private void validateInConfigWithExplicitTotalFlinkAndTaskHeapMem(
        final Configuration customConfig, Consumer<TaskExecutorProcessSpec> validateFunc) {
    log.info(
            "Validating in configuration with explicit total flink and task heap memory size.");
    final Configuration config = configWithExplicitTotalFlinkAndTaskHeapMem();
    config.addAll(customConfig);
    final TaskExecutorProcessSpec spec =
            TaskExecutorProcessUtils.processSpecFromConfig(config);
    assertThat(spec.getTotalFlinkMemorySize()).isEqualTo(TOTAL_FLINK_MEM_SIZE);
    assertThat(spec.getTaskHeapSize()).isEqualTo(TASK_HEAP_SIZE);
    validateFunc.accept(spec);
}
// Failure counterpart: the merged configuration must be rejected.
private void validateFailInConfigWithExplicitTotalFlinkAndTaskHeapMem(
        final Configuration customConfig) {
    log.info(
            "Validating failing in configuration with explicit total flink and task heap memory size.");
    final Configuration config = configWithExplicitTotalFlinkAndTaskHeapMem();
    config.addAll(customConfig);
    validateFail(config);
}
// Validates against a base configuration pinning total Flink memory and managed memory.
private void validateInConfigWithExplicitTotalFlinkAndManagedMem(
        final Configuration customConfig, Consumer<TaskExecutorProcessSpec> validateFunc) {
    log.info("Validating in configuration with explicit total flink and managed memory size.");
    final Configuration config = configWithExplicitTotalFlinkAndManagedMem();
    config.addAll(customConfig);
    final TaskExecutorProcessSpec spec =
            TaskExecutorProcessUtils.processSpecFromConfig(config);
    assertThat(spec.getTotalFlinkMemorySize()).isEqualTo(TOTAL_FLINK_MEM_SIZE);
    assertThat(spec.getManagedMemorySize()).isEqualTo(MANAGED_MEM_SIZE);
    validateFunc.accept(spec);
}
// Failure counterpart: the merged configuration must be rejected.
private void validateFailInConfigWithExplicitTotalFlinkAndManagedMem(
        final Configuration customConfig) {
    log.info(
            "Validating failing in configuration with explicit total flink and managed memory size.");
    final Configuration config = configWithExplicitTotalFlinkAndManagedMem();
    config.addAll(customConfig);
    validateFail(config);
}
// Validates against a base configuration that only pins the total process memory size.
private void validateInConfigWithExplicitTotalProcessMem(
        final Configuration customConfig, Consumer<TaskExecutorProcessSpec> validateFunc) {
    log.info("Validating in configuration with explicit total process memory size.");
    final Configuration config = configWithExplicitTotalProcessMem();
    config.addAll(customConfig);
    final TaskExecutorProcessSpec spec =
            TaskExecutorProcessUtils.processSpecFromConfig(config);
    assertThat(spec.getTotalProcessMemorySize()).isEqualTo(TOTAL_PROCESS_MEM_SIZE);
    validateFunc.accept(spec);
}
// Failure counterpart: the merged configuration must be rejected.
private void validateFailInConfigWithExplicitTotalProcessMem(final Configuration customConfig) {
    log.info("Validating failing in configuration with explicit total process memory size.");
    final Configuration config = configWithExplicitTotalProcessMem();
    config.addAll(customConfig);
    validateFail(config);
}
// Asserts that deriving a process spec from the given configuration throws
// IllegalConfigurationException (i.e. the configuration is internally inconsistent).
@Override
protected void validateFail(final Configuration config) {
    assertThatExceptionOfType(IllegalConfigurationException.class)
            .as("Configuration did not fail as expected.")
            .isThrownBy(() -> TaskExecutorProcessUtils.processSpecFromConfig(config));
}
// Splits the total evenly over the five explicitly configurable components; the sixth
// component (network memory) is then implied by the remainder.
@Override
protected void configWithFineGrainedOptions(
        Configuration configuration, MemorySize totalFlinkMemorySize) {
    final MemorySize componentSize = new MemorySize(totalFlinkMemorySize.getBytes() / 6);
    configuration.set(TaskManagerOptions.TASK_HEAP_MEMORY, componentSize);
    configuration.set(TaskManagerOptions.TASK_OFF_HEAP_MEMORY, componentSize);
    configuration.set(TaskManagerOptions.FRAMEWORK_HEAP_MEMORY, componentSize);
    configuration.set(TaskManagerOptions.FRAMEWORK_OFF_HEAP_MEMORY, componentSize);
    configuration.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, componentSize);
}
// Base configuration pinning task heap and managed memory explicitly.
// NOTE(review): method name is missing a "d" ("ManageMem" -> "ManagedMem"); kept as-is
// because callers throughout this class reference the current spelling.
private static Configuration configWithExplicitTaskHeapAndManageMem() {
    final Configuration conf = new Configuration();
    conf.set(TaskManagerOptions.TASK_HEAP_MEMORY, TASK_HEAP_SIZE);
    conf.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, MANAGED_MEM_SIZE);
    return conf;
}
// Base configuration that only pins the total Flink memory size.
private static Configuration configWithExplicitTotalFlinkMem() {
    final Configuration conf = new Configuration();
    conf.set(TaskManagerOptions.TOTAL_FLINK_MEMORY, TOTAL_FLINK_MEM_SIZE);
    return conf;
}
// Base configuration pinning the total Flink memory and the task heap size.
private static Configuration configWithExplicitTotalFlinkAndTaskHeapMem() {
    final Configuration conf = new Configuration();
    conf.set(TaskManagerOptions.TOTAL_FLINK_MEMORY, TOTAL_FLINK_MEM_SIZE);
    conf.set(TaskManagerOptions.TASK_HEAP_MEMORY, TASK_HEAP_SIZE);
    return conf;
}
// Base configuration pinning the total Flink memory and the managed memory size.
private static Configuration configWithExplicitTotalFlinkAndManagedMem() {
    final Configuration conf = new Configuration();
    conf.set(TaskManagerOptions.TOTAL_FLINK_MEMORY, TOTAL_FLINK_MEM_SIZE);
    conf.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, MANAGED_MEM_SIZE);
    return conf;
}
// Base configuration that only pins the total process memory size.
private static Configuration configWithExplicitTotalProcessMem() {
    final Configuration conf = new Configuration();
    conf.set(TaskManagerOptions.TOTAL_PROCESS_MEMORY, TOTAL_PROCESS_MEM_SIZE);
    return conf;
}
// Hook for the shared base-class tests: derives the spec under test from a configuration.
@Override
protected TaskExecutorProcessSpec processSpecFromConfig(Configuration config) {
    return TaskExecutorProcessUtils.processSpecFromConfig(config);
}
}
|
googleapis/google-cloud-java | 36,099 | java-service-management/proto-google-cloud-service-management-v1/src/main/java/com/google/api/servicemanagement/v1/ListServiceConfigsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/servicemanagement/v1/servicemanager.proto
// Protobuf Java Version: 3.25.8
package com.google.api.servicemanagement.v1;
/**
*
*
* <pre>
* Response message for ListServiceConfigs method.
* </pre>
*
* Protobuf type {@code google.api.servicemanagement.v1.ListServiceConfigsResponse}
*/
public final class ListServiceConfigsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.api.servicemanagement.v1.ListServiceConfigsResponse)
ListServiceConfigsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListServiceConfigsResponse.newBuilder() to construct.
private ListServiceConfigsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
}
// Initializes the default-instance field values: an empty repeated list and empty token.
private ListServiceConfigsResponse() {
    serviceConfigs_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
}
// Generated plumbing: creates a fresh message instance for the protobuf runtime.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListServiceConfigsResponse();
}
// Returns the descriptor for this message type from the generated outer class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.api.servicemanagement.v1.ServiceManagerProto
        .internal_static_google_api_servicemanagement_v1_ListServiceConfigsResponse_descriptor;
}
// Binds the generated field-accessor table to this message and its Builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
    return com.google.api.servicemanagement.v1.ServiceManagerProto
        .internal_static_google_api_servicemanagement_v1_ListServiceConfigsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.api.servicemanagement.v1.ListServiceConfigsResponse.class,
            com.google.api.servicemanagement.v1.ListServiceConfigsResponse.Builder.class);
}
public static final int SERVICE_CONFIGS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.api.Service> serviceConfigs_;
/**
 *
 *
 * <pre>
 * The list of service configuration resources.
 * </pre>
 *
 * <code>repeated .google.api.Service service_configs = 1;</code>
 *
 * @return the (unmodifiable) list of service configurations
 */
@java.lang.Override
public java.util.List<com.google.api.Service> getServiceConfigsList() {
    return serviceConfigs_;
}
/**
 *
 *
 * <pre>
 * The list of service configuration resources.
 * </pre>
 *
 * <code>repeated .google.api.Service service_configs = 1;</code>
 *
 * @return the list viewed as message-or-builder elements
 */
@java.lang.Override
public java.util.List<? extends com.google.api.ServiceOrBuilder>
    getServiceConfigsOrBuilderList() {
    return serviceConfigs_;
}
/**
 *
 *
 * <pre>
 * The list of service configuration resources.
 * </pre>
 *
 * <code>repeated .google.api.Service service_configs = 1;</code>
 *
 * @return the number of service configurations
 */
@java.lang.Override
public int getServiceConfigsCount() {
    return serviceConfigs_.size();
}
/**
 *
 *
 * <pre>
 * The list of service configuration resources.
 * </pre>
 *
 * <code>repeated .google.api.Service service_configs = 1;</code>
 *
 * @param index zero-based index of the element to return
 * @return the service configuration at the given index
 */
@java.lang.Override
public com.google.api.Service getServiceConfigs(int index) {
    return serviceConfigs_.get(index);
}
/**
 *
 *
 * <pre>
 * The list of service configuration resources.
 * </pre>
 *
 * <code>repeated .google.api.Service service_configs = 1;</code>
 *
 * @param index zero-based index of the element to return
 * @return the element at the given index as a message-or-builder view
 */
@java.lang.Override
public com.google.api.ServiceOrBuilder getServiceConfigsOrBuilder(int index) {
    return serviceConfigs_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * The token of the next page of results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
    } else {
        // Field was parsed as a ByteString; decode once and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
    }
}
/**
 *
 *
 * <pre>
 * The token of the next page of results.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
        // Field is currently a String; encode once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
    } else {
        return (com.google.protobuf.ByteString) ref;
    }
}
private byte memoizedIsInitialized = -1;
// This message declares no required fields, so initialization always succeeds;
// the result is memoized in memoizedIsInitialized (-1 unknown, 0 false, 1 true).
@java.lang.Override
public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
}
// Serializes the populated fields in field-number order; empty strings are skipped
// per proto3 default-value semantics.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < serviceConfigs_.size(); i++) {
        output.writeMessage(1, serviceConfigs_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size of this message.
@java.lang.Override
public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < serviceConfigs_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, serviceConfigs_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
}
// Field-wise equality over both declared fields plus the unknown-field set.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
        return true;
    }
    if (!(obj instanceof com.google.api.servicemanagement.v1.ListServiceConfigsResponse)) {
        return super.equals(obj);
    }
    com.google.api.servicemanagement.v1.ListServiceConfigsResponse other =
        (com.google.api.servicemanagement.v1.ListServiceConfigsResponse) obj;
    if (!getServiceConfigsList().equals(other.getServiceConfigsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
}
// Hash over descriptor and populated fields, memoized in memoizedHashCode
// (consistent with equals, as required by the hashCode contract).
@java.lang.Override
public int hashCode() {
    if (memoizedHashCode != 0) {
        return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getServiceConfigsCount() > 0) {
        hash = (37 * hash) + SERVICE_CONFIGS_FIELD_NUMBER;
        hash = (53 * hash) + getServiceConfigsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
}
// Standard generated parse overloads, one per input representation, all delegating
// to the message PARSER (directly, or via the GeneratedMessageV3 IO helpers).
public static com.google.api.servicemanagement.v1.ListServiceConfigsResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}
public static com.google.api.servicemanagement.v1.ListServiceConfigsResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.servicemanagement.v1.ListServiceConfigsResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}
public static com.google.api.servicemanagement.v1.ListServiceConfigsResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.servicemanagement.v1.ListServiceConfigsResponse parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}
public static com.google.api.servicemanagement.v1.ListServiceConfigsResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.servicemanagement.v1.ListServiceConfigsResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.servicemanagement.v1.ListServiceConfigsResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
}
// Delimited variants read a length prefix first (for streams of messages).
public static com.google.api.servicemanagement.v1.ListServiceConfigsResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.api.servicemanagement.v1.ListServiceConfigsResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
}
public static com.google.api.servicemanagement.v1.ListServiceConfigsResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.servicemanagement.v1.ListServiceConfigsResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
}
// Builder factory methods. Builders derived from the default instance start empty;
// toBuilder() pre-populates the builder from this message's current field values.
@java.lang.Override
public Builder newBuilderForType() {
    return newBuilder();
}
public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
    com.google.api.servicemanagement.v1.ListServiceConfigsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
    // The default instance yields a fresh empty Builder; anything else is merged in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
}
/**
*
*
* <pre>
* Response message for ListServiceConfigs method.
* </pre>
*
* Protobuf type {@code google.api.servicemanagement.v1.ListServiceConfigsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.api.servicemanagement.v1.ListServiceConfigsResponse)
com.google.api.servicemanagement.v1.ListServiceConfigsResponseOrBuilder {
// Descriptor for the message type this Builder constructs.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.api.servicemanagement.v1.ServiceManagerProto
        .internal_static_google_api_servicemanagement_v1_ListServiceConfigsResponse_descriptor;
}
// Binds the generated field-accessor table to the message and this Builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
    return com.google.api.servicemanagement.v1.ServiceManagerProto
        .internal_static_google_api_servicemanagement_v1_ListServiceConfigsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.api.servicemanagement.v1.ListServiceConfigsResponse.class,
            com.google.api.servicemanagement.v1.ListServiceConfigsResponse.Builder.class);
}
// Construct using com.google.api.servicemanagement.v1.ListServiceConfigsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (serviceConfigsBuilder_ == null) {
serviceConfigs_ = java.util.Collections.emptyList();
} else {
serviceConfigs_ = null;
serviceConfigsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.api.servicemanagement.v1.ServiceManagerProto
.internal_static_google_api_servicemanagement_v1_ListServiceConfigsResponse_descriptor;
}
@java.lang.Override
public com.google.api.servicemanagement.v1.ListServiceConfigsResponse
getDefaultInstanceForType() {
return com.google.api.servicemanagement.v1.ListServiceConfigsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.api.servicemanagement.v1.ListServiceConfigsResponse build() {
com.google.api.servicemanagement.v1.ListServiceConfigsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.api.servicemanagement.v1.ListServiceConfigsResponse buildPartial() {
com.google.api.servicemanagement.v1.ListServiceConfigsResponse result =
new com.google.api.servicemanagement.v1.ListServiceConfigsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.api.servicemanagement.v1.ListServiceConfigsResponse result) {
if (serviceConfigsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
serviceConfigs_ = java.util.Collections.unmodifiableList(serviceConfigs_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.serviceConfigs_ = serviceConfigs_;
} else {
result.serviceConfigs_ = serviceConfigsBuilder_.build();
}
}
private void buildPartial0(
com.google.api.servicemanagement.v1.ListServiceConfigsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.api.servicemanagement.v1.ListServiceConfigsResponse) {
return mergeFrom((com.google.api.servicemanagement.v1.ListServiceConfigsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.api.servicemanagement.v1.ListServiceConfigsResponse other) {
if (other
== com.google.api.servicemanagement.v1.ListServiceConfigsResponse.getDefaultInstance())
return this;
if (serviceConfigsBuilder_ == null) {
if (!other.serviceConfigs_.isEmpty()) {
if (serviceConfigs_.isEmpty()) {
serviceConfigs_ = other.serviceConfigs_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureServiceConfigsIsMutable();
serviceConfigs_.addAll(other.serviceConfigs_);
}
onChanged();
}
} else {
if (!other.serviceConfigs_.isEmpty()) {
if (serviceConfigsBuilder_.isEmpty()) {
serviceConfigsBuilder_.dispose();
serviceConfigsBuilder_ = null;
serviceConfigs_ = other.serviceConfigs_;
bitField0_ = (bitField0_ & ~0x00000001);
serviceConfigsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getServiceConfigsFieldBuilder()
: null;
} else {
serviceConfigsBuilder_.addAllMessages(other.serviceConfigs_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.api.Service m =
input.readMessage(com.google.api.Service.parser(), extensionRegistry);
if (serviceConfigsBuilder_ == null) {
ensureServiceConfigsIsMutable();
serviceConfigs_.add(m);
} else {
serviceConfigsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.api.Service> serviceConfigs_ =
java.util.Collections.emptyList();
private void ensureServiceConfigsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
serviceConfigs_ = new java.util.ArrayList<com.google.api.Service>(serviceConfigs_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.api.Service, com.google.api.Service.Builder, com.google.api.ServiceOrBuilder>
serviceConfigsBuilder_;
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public java.util.List<com.google.api.Service> getServiceConfigsList() {
if (serviceConfigsBuilder_ == null) {
return java.util.Collections.unmodifiableList(serviceConfigs_);
} else {
return serviceConfigsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public int getServiceConfigsCount() {
if (serviceConfigsBuilder_ == null) {
return serviceConfigs_.size();
} else {
return serviceConfigsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public com.google.api.Service getServiceConfigs(int index) {
if (serviceConfigsBuilder_ == null) {
return serviceConfigs_.get(index);
} else {
return serviceConfigsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public Builder setServiceConfigs(int index, com.google.api.Service value) {
if (serviceConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServiceConfigsIsMutable();
serviceConfigs_.set(index, value);
onChanged();
} else {
serviceConfigsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public Builder setServiceConfigs(int index, com.google.api.Service.Builder builderForValue) {
if (serviceConfigsBuilder_ == null) {
ensureServiceConfigsIsMutable();
serviceConfigs_.set(index, builderForValue.build());
onChanged();
} else {
serviceConfigsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public Builder addServiceConfigs(com.google.api.Service value) {
if (serviceConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServiceConfigsIsMutable();
serviceConfigs_.add(value);
onChanged();
} else {
serviceConfigsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public Builder addServiceConfigs(int index, com.google.api.Service value) {
if (serviceConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServiceConfigsIsMutable();
serviceConfigs_.add(index, value);
onChanged();
} else {
serviceConfigsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public Builder addServiceConfigs(com.google.api.Service.Builder builderForValue) {
if (serviceConfigsBuilder_ == null) {
ensureServiceConfigsIsMutable();
serviceConfigs_.add(builderForValue.build());
onChanged();
} else {
serviceConfigsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public Builder addServiceConfigs(int index, com.google.api.Service.Builder builderForValue) {
if (serviceConfigsBuilder_ == null) {
ensureServiceConfigsIsMutable();
serviceConfigs_.add(index, builderForValue.build());
onChanged();
} else {
serviceConfigsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public Builder addAllServiceConfigs(
java.lang.Iterable<? extends com.google.api.Service> values) {
if (serviceConfigsBuilder_ == null) {
ensureServiceConfigsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, serviceConfigs_);
onChanged();
} else {
serviceConfigsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public Builder clearServiceConfigs() {
if (serviceConfigsBuilder_ == null) {
serviceConfigs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
serviceConfigsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public Builder removeServiceConfigs(int index) {
if (serviceConfigsBuilder_ == null) {
ensureServiceConfigsIsMutable();
serviceConfigs_.remove(index);
onChanged();
} else {
serviceConfigsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public com.google.api.Service.Builder getServiceConfigsBuilder(int index) {
return getServiceConfigsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public com.google.api.ServiceOrBuilder getServiceConfigsOrBuilder(int index) {
if (serviceConfigsBuilder_ == null) {
return serviceConfigs_.get(index);
} else {
return serviceConfigsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public java.util.List<? extends com.google.api.ServiceOrBuilder>
getServiceConfigsOrBuilderList() {
if (serviceConfigsBuilder_ != null) {
return serviceConfigsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(serviceConfigs_);
}
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public com.google.api.Service.Builder addServiceConfigsBuilder() {
return getServiceConfigsFieldBuilder()
.addBuilder(com.google.api.Service.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public com.google.api.Service.Builder addServiceConfigsBuilder(int index) {
return getServiceConfigsFieldBuilder()
.addBuilder(index, com.google.api.Service.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of service configuration resources.
* </pre>
*
* <code>repeated .google.api.Service service_configs = 1;</code>
*/
public java.util.List<com.google.api.Service.Builder> getServiceConfigsBuilderList() {
return getServiceConfigsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.api.Service, com.google.api.Service.Builder, com.google.api.ServiceOrBuilder>
getServiceConfigsFieldBuilder() {
if (serviceConfigsBuilder_ == null) {
serviceConfigsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.api.Service,
com.google.api.Service.Builder,
com.google.api.ServiceOrBuilder>(
serviceConfigs_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
serviceConfigs_ = null;
}
return serviceConfigsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The token of the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The token of the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The token of the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The token of the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The token of the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.api.servicemanagement.v1.ListServiceConfigsResponse)
}
  // @@protoc_insertion_point(class_scope:google.api.servicemanagement.v1.ListServiceConfigsResponse)
  // Shared immutable singleton returned by getDefaultInstance().
  private static final com.google.api.servicemanagement.v1.ListServiceConfigsResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.api.servicemanagement.v1.ListServiceConfigsResponse();
  }
  // Returns the shared default (all-fields-default) instance.
  public static com.google.api.servicemanagement.v1.ListServiceConfigsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser singleton; all stream parsing funnels through
  // Builder.mergeFrom so unknown fields and partially read messages are kept.
  private static final com.google.protobuf.Parser<ListServiceConfigsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListServiceConfigsResponse>() {
        @java.lang.Override
        public ListServiceConfigsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser.
  public static com.google.protobuf.Parser<ListServiceConfigsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListServiceConfigsResponse> getParserForType() {
    // Instance accessor required by the Message interface.
    return PARSER;
  }
  @java.lang.Override
  public com.google.api.servicemanagement.v1.ListServiceConfigsResponse
      getDefaultInstanceForType() {
    // Instance accessor required by the MessageOrBuilder interface.
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataplex/v1/data_taxonomy.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataplex.v1;
/**
*
*
* <pre>
* Update DataTaxonomy request.
* </pre>
*
* Protobuf type {@code google.cloud.dataplex.v1.UpdateDataTaxonomyRequest}
*/
@java.lang.Deprecated
public final class UpdateDataTaxonomyRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.UpdateDataTaxonomyRequest)
UpdateDataTaxonomyRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use UpdateDataTaxonomyRequest.newBuilder() to construct.
  // Copies builder state (including unknown fields) into the new message.
  private UpdateDataTaxonomyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
private UpdateDataTaxonomyRequest() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Reflection hook used by the protobuf runtime to allocate instances.
    return new UpdateDataTaxonomyRequest();
  }
  // Message descriptor from the generated file descriptor.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataplex.v1.DataTaxonomyProto
        .internal_static_google_cloud_dataplex_v1_UpdateDataTaxonomyRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds descriptor fields to this class's reflective accessors.
    return com.google.cloud.dataplex.v1.DataTaxonomyProto
        .internal_static_google_cloud_dataplex_v1_UpdateDataTaxonomyRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest.class,
            com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest.Builder.class);
  }
private int bitField0_;
public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * Required. Mask of fields to update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    // Presence tracked via bitField0_ bit 0x1.
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Mask of fields to update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    // Never returns null; falls back to the FieldMask default instance.
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   *
   *
   * <pre>
   * Required. Mask of fields to update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    // On the immutable message this is identical to getUpdateMask().
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
public static final int DATA_TAXONOMY_FIELD_NUMBER = 2;
private com.google.cloud.dataplex.v1.DataTaxonomy dataTaxonomy_;
  /**
   *
   *
   * <pre>
   * Required. Only fields specified in `update_mask` are updated.
   * </pre>
   *
   * <code>
   * .google.cloud.dataplex.v1.DataTaxonomy data_taxonomy = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the dataTaxonomy field is set.
   */
  @java.lang.Override
  public boolean hasDataTaxonomy() {
    // Presence tracked via bitField0_ bit 0x2.
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Only fields specified in `update_mask` are updated.
   * </pre>
   *
   * <code>
   * .google.cloud.dataplex.v1.DataTaxonomy data_taxonomy = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The dataTaxonomy.
   */
  @java.lang.Override
  public com.google.cloud.dataplex.v1.DataTaxonomy getDataTaxonomy() {
    // Never returns null; falls back to the DataTaxonomy default instance.
    return dataTaxonomy_ == null
        ? com.google.cloud.dataplex.v1.DataTaxonomy.getDefaultInstance()
        : dataTaxonomy_;
  }
  /**
   *
   *
   * <pre>
   * Required. Only fields specified in `update_mask` are updated.
   * </pre>
   *
   * <code>
   * .google.cloud.dataplex.v1.DataTaxonomy data_taxonomy = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dataplex.v1.DataTaxonomyOrBuilder getDataTaxonomyOrBuilder() {
    // On the immutable message this is identical to getDataTaxonomy().
    return dataTaxonomy_ == null
        ? com.google.cloud.dataplex.v1.DataTaxonomy.getDefaultInstance()
        : dataTaxonomy_;
  }
public static final int VALIDATE_ONLY_FIELD_NUMBER = 3;
private boolean validateOnly_ = false;
  /**
   *
   *
   * <pre>
   * Optional. Only validate the request, but do not perform mutations.
   * The default is false.
   * </pre>
   *
   * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The validateOnly.
   */
  @java.lang.Override
  public boolean getValidateOnly() {
    // Plain proto3 scalar; no presence bit.
    return validateOnly_;
  }
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Memoized: -1 = unknown, 1 = initialized, 0 = not. Proto3 messages with no
    // required fields are always initialized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serializes only fields that are set (message presence bits / non-default bool).
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getDataTaxonomy());
    }
    if (validateOnly_ != false) {
      output.writeBool(3, validateOnly_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Cached after first computation; must mirror writeTo's field conditions.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getDataTaxonomy());
    }
    if (validateOnly_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    // Field-by-field structural equality, including unknown fields.
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest other =
        (com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest) obj;

    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (hasDataTaxonomy() != other.hasDataTaxonomy()) return false;
    if (hasDataTaxonomy()) {
      if (!getDataTaxonomy().equals(other.getDataTaxonomy())) return false;
    }
    if (getValidateOnly() != other.getValidateOnly()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized hash consistent with equals(); mixes field numbers and values.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    if (hasDataTaxonomy()) {
      hash = (37 * hash) + DATA_TAXONOMY_FIELD_NUMBER;
      hash = (53 * hash) + getDataTaxonomy().hashCode();
    }
    hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Parses from a ByteBuffer without an extension registry.
  public static com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses from a ByteBuffer using the given extension registry.
  public static com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses from a ByteString without an extension registry.
  public static com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses from a ByteString using the given extension registry.
  public static com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses from a byte array without an extension registry.
  public static com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses from a byte array using the given extension registry.
  public static com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses one message from an InputStream (reads to EOF, not length-delimited).
  public static com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // Parses one message from an InputStream using the given extension registry.
  public static com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses one length-prefixed (delimited) message from the stream.
  public static com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  // Parses one length-prefixed (delimited) message using the given registry.
  public static com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses directly from a CodedInputStream.
  public static com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // Parses directly from a CodedInputStream using the given registry.
  public static com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    // Fresh builder with all fields at their proto3 defaults.
    return newBuilder();
  }
  // Creates a new builder seeded from the immutable default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a new builder pre-populated with a copy of {@code prototype}'s fields.
  public static Builder newBuilder(
      com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance skips the mergeFrom copy as an optimization.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    // Parent-aware builder used for nested-builder change notification.
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Update DataTaxonomy request.
* </pre>
*
* Protobuf type {@code google.cloud.dataplex.v1.UpdateDataTaxonomyRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.UpdateDataTaxonomyRequest)
com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequestOrBuilder {
    // Message descriptor shared with the enclosing message type.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataplex.v1.DataTaxonomyProto
          .internal_static_google_cloud_dataplex_v1_UpdateDataTaxonomyRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      // Binds descriptor fields to reflective accessors for message and builder.
      return com.google.cloud.dataplex.v1.DataTaxonomyProto
          .internal_static_google_cloud_dataplex_v1_UpdateDataTaxonomyRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest.class,
              com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest.Builder.class);
    }
    // Construct using com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    // Parent-aware constructor used for nested-builder change notification.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates the singular-field builders when the runtime requests it
    // (alwaysUseFieldBuilders), so change events propagate to the parent.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getUpdateMaskFieldBuilder();
        getDataTaxonomyFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // Reset both singular message fields and dispose their builders.
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      dataTaxonomy_ = null;
      if (dataTaxonomyBuilder_ != null) {
        dataTaxonomyBuilder_.dispose();
        dataTaxonomyBuilder_ = null;
      }
      validateOnly_ = false;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      // Same descriptor as the static getDescriptor().
      return com.google.cloud.dataplex.v1.DataTaxonomyProto
          .internal_static_google_cloud_dataplex_v1_UpdateDataTaxonomyRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest getDefaultInstanceForType() {
      // Shared immutable default instance of the message type being built.
      return com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest.getDefaultInstance();
    }
@java.lang.Override
public com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest build() {
com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest buildPartial() {
com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest result =
new com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.dataTaxonomy_ =
dataTaxonomyBuilder_ == null ? dataTaxonomy_ : dataTaxonomyBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.validateOnly_ = validateOnly_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
  return super.clone();
}

// The following overrides simply narrow the return type to Builder; all reflection-based
// field manipulation is delegated to the GeneratedMessageV3.Builder superclass.
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

// Dispatches to the typed merge when possible; otherwise falls back to the reflective merge.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest) {
    return mergeFrom((com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Field-by-field merge: only fields that are set (or non-default, for the bool) in
// `other` overwrite/merge into this builder; unknown fields are merged as well.
public Builder mergeFrom(com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest other) {
  if (other == com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest.getDefaultInstance())
    return this;
  if (other.hasUpdateMask()) {
    mergeUpdateMask(other.getUpdateMask());
  }
  if (other.hasDataTaxonomy()) {
    mergeDataTaxonomy(other.getDataTaxonomy());
  }
  if (other.getValidateOnly() != false) {
    setValidateOnly(other.getValidateOnly());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

// This message has no proto2 required fields, so it is always initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Parses fields from the wire format directly into this builder. Tag values are the
// proto wire tags: (field_number << 3) | wire_type.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 signals end of stream.
          done = true;
          break;
        case 10:
          {
            // Field 1 (update_mask), length-delimited.
            input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            // Field 2 (data_taxonomy), length-delimited.
            input.readMessage(getDataTaxonomyFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        case 24:
          {
            // Field 3 (validate_only), varint bool.
            validateOnly_ = input.readBool();
            bitField0_ |= 0x00000004;
            break;
          } // case 24
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify listeners even on parse failure, since some fields may have been set.
    onChanged();
  } // finally
  return this;
}
// Presence bits: 0x1 = update_mask, 0x2 = data_taxonomy, 0x4 = validate_only.
private int bitField0_;

private com.google.protobuf.FieldMask updateMask_;
// Lazily-created builder for update_mask; when non-null it is the source of truth
// and updateMask_ is kept null.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    updateMaskBuilder_;
/**
 *
 *
 * <pre>
 * Required. Mask of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the updateMask field is set.
 */
public boolean hasUpdateMask() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * Required. Mask of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The updateMask.
 */
public com.google.protobuf.FieldMask getUpdateMask() {
  if (updateMaskBuilder_ == null) {
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  } else {
    return updateMaskBuilder_.getMessage();
  }
}
/**
 *
 *
 * <pre>
 * Required. Mask of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    updateMask_ = value;
  } else {
    updateMaskBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. Mask of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
  if (updateMaskBuilder_ == null) {
    updateMask_ = builderForValue.build();
  } else {
    updateMaskBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. Mask of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    // If a non-default mask is already present, merge into it; otherwise replace.
    if (((bitField0_ & 0x00000001) != 0)
        && updateMask_ != null
        && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
      getUpdateMaskBuilder().mergeFrom(value);
    } else {
      updateMask_ = value;
    }
  } else {
    updateMaskBuilder_.mergeFrom(value);
  }
  if (updateMask_ != null) {
    bitField0_ |= 0x00000001;
    onChanged();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Required. Mask of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder clearUpdateMask() {
  bitField0_ = (bitField0_ & ~0x00000001);
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. Mask of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
  // Marks the field as set because handing out the builder allows mutation.
  bitField0_ |= 0x00000001;
  onChanged();
  return getUpdateMaskFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * Required. Mask of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessageOrBuilder();
  } else {
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  }
}
/**
 *
 *
 * <pre>
 * Required. Mask of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    getUpdateMaskFieldBuilder() {
  // Lazily create the field builder; ownership of the current value transfers to it.
  if (updateMaskBuilder_ == null) {
    updateMaskBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>(
            getUpdateMask(), getParentForChildren(), isClean());
    updateMask_ = null;
  }
  return updateMaskBuilder_;
}
private com.google.cloud.dataplex.v1.DataTaxonomy dataTaxonomy_;
// Lazily-created builder for data_taxonomy; when non-null it is the source of truth
// and dataTaxonomy_ is kept null.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.dataplex.v1.DataTaxonomy,
        com.google.cloud.dataplex.v1.DataTaxonomy.Builder,
        com.google.cloud.dataplex.v1.DataTaxonomyOrBuilder>
    dataTaxonomyBuilder_;
/**
 *
 *
 * <pre>
 * Required. Only fields specified in `update_mask` are updated.
 * </pre>
 *
 * <code>
 * .google.cloud.dataplex.v1.DataTaxonomy data_taxonomy = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the dataTaxonomy field is set.
 */
public boolean hasDataTaxonomy() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 *
 *
 * <pre>
 * Required. Only fields specified in `update_mask` are updated.
 * </pre>
 *
 * <code>
 * .google.cloud.dataplex.v1.DataTaxonomy data_taxonomy = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The dataTaxonomy.
 */
public com.google.cloud.dataplex.v1.DataTaxonomy getDataTaxonomy() {
  if (dataTaxonomyBuilder_ == null) {
    return dataTaxonomy_ == null
        ? com.google.cloud.dataplex.v1.DataTaxonomy.getDefaultInstance()
        : dataTaxonomy_;
  } else {
    return dataTaxonomyBuilder_.getMessage();
  }
}
/**
 *
 *
 * <pre>
 * Required. Only fields specified in `update_mask` are updated.
 * </pre>
 *
 * <code>
 * .google.cloud.dataplex.v1.DataTaxonomy data_taxonomy = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setDataTaxonomy(com.google.cloud.dataplex.v1.DataTaxonomy value) {
  if (dataTaxonomyBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    dataTaxonomy_ = value;
  } else {
    dataTaxonomyBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. Only fields specified in `update_mask` are updated.
 * </pre>
 *
 * <code>
 * .google.cloud.dataplex.v1.DataTaxonomy data_taxonomy = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setDataTaxonomy(
    com.google.cloud.dataplex.v1.DataTaxonomy.Builder builderForValue) {
  if (dataTaxonomyBuilder_ == null) {
    dataTaxonomy_ = builderForValue.build();
  } else {
    dataTaxonomyBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. Only fields specified in `update_mask` are updated.
 * </pre>
 *
 * <code>
 * .google.cloud.dataplex.v1.DataTaxonomy data_taxonomy = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder mergeDataTaxonomy(com.google.cloud.dataplex.v1.DataTaxonomy value) {
  if (dataTaxonomyBuilder_ == null) {
    // If a non-default value is already present, merge into it; otherwise replace.
    if (((bitField0_ & 0x00000002) != 0)
        && dataTaxonomy_ != null
        && dataTaxonomy_ != com.google.cloud.dataplex.v1.DataTaxonomy.getDefaultInstance()) {
      getDataTaxonomyBuilder().mergeFrom(value);
    } else {
      dataTaxonomy_ = value;
    }
  } else {
    dataTaxonomyBuilder_.mergeFrom(value);
  }
  if (dataTaxonomy_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Required. Only fields specified in `update_mask` are updated.
 * </pre>
 *
 * <code>
 * .google.cloud.dataplex.v1.DataTaxonomy data_taxonomy = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder clearDataTaxonomy() {
  bitField0_ = (bitField0_ & ~0x00000002);
  dataTaxonomy_ = null;
  if (dataTaxonomyBuilder_ != null) {
    dataTaxonomyBuilder_.dispose();
    dataTaxonomyBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. Only fields specified in `update_mask` are updated.
 * </pre>
 *
 * <code>
 * .google.cloud.dataplex.v1.DataTaxonomy data_taxonomy = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.cloud.dataplex.v1.DataTaxonomy.Builder getDataTaxonomyBuilder() {
  // Marks the field as set because handing out the builder allows mutation.
  bitField0_ |= 0x00000002;
  onChanged();
  return getDataTaxonomyFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * Required. Only fields specified in `update_mask` are updated.
 * </pre>
 *
 * <code>
 * .google.cloud.dataplex.v1.DataTaxonomy data_taxonomy = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.cloud.dataplex.v1.DataTaxonomyOrBuilder getDataTaxonomyOrBuilder() {
  if (dataTaxonomyBuilder_ != null) {
    return dataTaxonomyBuilder_.getMessageOrBuilder();
  } else {
    return dataTaxonomy_ == null
        ? com.google.cloud.dataplex.v1.DataTaxonomy.getDefaultInstance()
        : dataTaxonomy_;
  }
}
/**
 *
 *
 * <pre>
 * Required. Only fields specified in `update_mask` are updated.
 * </pre>
 *
 * <code>
 * .google.cloud.dataplex.v1.DataTaxonomy data_taxonomy = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.dataplex.v1.DataTaxonomy,
        com.google.cloud.dataplex.v1.DataTaxonomy.Builder,
        com.google.cloud.dataplex.v1.DataTaxonomyOrBuilder>
    getDataTaxonomyFieldBuilder() {
  // Lazily create the field builder; ownership of the current value transfers to it.
  if (dataTaxonomyBuilder_ == null) {
    dataTaxonomyBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataplex.v1.DataTaxonomy,
            com.google.cloud.dataplex.v1.DataTaxonomy.Builder,
            com.google.cloud.dataplex.v1.DataTaxonomyOrBuilder>(
            getDataTaxonomy(), getParentForChildren(), isClean());
    dataTaxonomy_ = null;
  }
  return dataTaxonomyBuilder_;
}
private boolean validateOnly_;
/**
 *
 *
 * <pre>
 * Optional. Only validate the request, but do not perform mutations.
 * The default is false.
 * </pre>
 *
 * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The validateOnly.
 */
@java.lang.Override
public boolean getValidateOnly() {
  return validateOnly_;
}
/**
 *
 *
 * <pre>
 * Optional. Only validate the request, but do not perform mutations.
 * The default is false.
 * </pre>
 *
 * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The validateOnly to set.
 * @return This builder for chaining.
 */
public Builder setValidateOnly(boolean value) {
  validateOnly_ = value;
  // Bit 0x4 records explicit presence for buildPartial0.
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. Only validate the request, but do not perform mutations.
 * The default is false.
 * </pre>
 *
 * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearValidateOnly() {
  bitField0_ = (bitField0_ & ~0x00000004);
  validateOnly_ = false;
  onChanged();
  return this;
}

// Narrowed-return delegates for unknown-field handling.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.UpdateDataTaxonomyRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.UpdateDataTaxonomyRequest)
// Singleton default instance; all unset message-typed getters funnel through this.
private static final com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest();
}

public static com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser that delegates to the builder's stream merge; on failure it attaches the
// partially-built message to the thrown InvalidProtocolBufferException.
private static final com.google.protobuf.Parser<UpdateDataTaxonomyRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateDataTaxonomyRequest>() {
      @java.lang.Override
      public UpdateDataTaxonomyRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so callers see a single exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<UpdateDataTaxonomyRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<UpdateDataTaxonomyRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.dataplex.v1.UpdateDataTaxonomyRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-api-java-client-services | 36,252 | clients/google-api-services-drive/v2/1.26.0/com/google/api/services/drive/model/Drive.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.drive.model;
/**
* Representation of a shared drive.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Drive API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Drive extends com.google.api.client.json.GenericJson {
// All fields below are JSON-mapped via @com.google.api.client.util.Key and may be null
// when the server omits them from a response.
/**
 * An image file and cropping parameters from which a background image for this shared drive is
 * set. This is a write only field; it can only be set on drive.drives.update requests that don't
 * set themeId. When specified, all fields of the backgroundImageFile must be set.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private BackgroundImageFile backgroundImageFile;

/**
 * A short-lived link to this shared drive's background image.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String backgroundImageLink;

/**
 * Capabilities the current user has on this shared drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private Capabilities capabilities;

/**
 * The color of this shared drive as an RGB hex string. It can only be set on a
 * drive.drives.update request that does not set themeId.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String colorRgb;

/**
 * The time at which the shared drive was created (RFC 3339 date-time).
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private com.google.api.client.util.DateTime createdDate;

/**
 * Whether the shared drive is hidden from default view.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean hidden;

/**
 * The ID of this shared drive which is also the ID of the top level folder of this shared drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String id;

/**
 * This is always drive#drive
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String kind;

/**
 * The name of this shared drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String name;

/**
 * A set of restrictions that apply to this shared drive or items inside this shared drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private Restrictions restrictions;

/**
 * The ID of the theme from which the background image and color will be set. The set of possible
 * driveThemes can be retrieved from a drive.about.get response. When not specified on a
 * drive.drives.insert request, a random theme is chosen from which the background image and color
 * are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
 * backgroundImageFile.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String themeId;

// Accessors below follow the generated fluent-setter convention: getters return the raw
// field (possibly null), setters assign and return this Drive for chaining.
/**
 * An image file and cropping parameters from which a background image for this shared drive is
 * set. This is a write only field; it can only be set on drive.drives.update requests that don't
 * set themeId. When specified, all fields of the backgroundImageFile must be set.
 * @return value or {@code null} for none
 */
public BackgroundImageFile getBackgroundImageFile() {
  return backgroundImageFile;
}

/**
 * An image file and cropping parameters from which a background image for this shared drive is
 * set. This is a write only field; it can only be set on drive.drives.update requests that don't
 * set themeId. When specified, all fields of the backgroundImageFile must be set.
 * @param backgroundImageFile backgroundImageFile or {@code null} for none
 */
public Drive setBackgroundImageFile(BackgroundImageFile backgroundImageFile) {
  this.backgroundImageFile = backgroundImageFile;
  return this;
}

/**
 * A short-lived link to this shared drive's background image.
 * @return value or {@code null} for none
 */
public java.lang.String getBackgroundImageLink() {
  return backgroundImageLink;
}

/**
 * A short-lived link to this shared drive's background image.
 * @param backgroundImageLink backgroundImageLink or {@code null} for none
 */
public Drive setBackgroundImageLink(java.lang.String backgroundImageLink) {
  this.backgroundImageLink = backgroundImageLink;
  return this;
}

/**
 * Capabilities the current user has on this shared drive.
 * @return value or {@code null} for none
 */
public Capabilities getCapabilities() {
  return capabilities;
}

/**
 * Capabilities the current user has on this shared drive.
 * @param capabilities capabilities or {@code null} for none
 */
public Drive setCapabilities(Capabilities capabilities) {
  this.capabilities = capabilities;
  return this;
}

/**
 * The color of this shared drive as an RGB hex string. It can only be set on a
 * drive.drives.update request that does not set themeId.
 * @return value or {@code null} for none
 */
public java.lang.String getColorRgb() {
  return colorRgb;
}

/**
 * The color of this shared drive as an RGB hex string. It can only be set on a
 * drive.drives.update request that does not set themeId.
 * @param colorRgb colorRgb or {@code null} for none
 */
public Drive setColorRgb(java.lang.String colorRgb) {
  this.colorRgb = colorRgb;
  return this;
}

/**
 * The time at which the shared drive was created (RFC 3339 date-time).
 * @return value or {@code null} for none
 */
public com.google.api.client.util.DateTime getCreatedDate() {
  return createdDate;
}

/**
 * The time at which the shared drive was created (RFC 3339 date-time).
 * @param createdDate createdDate or {@code null} for none
 */
public Drive setCreatedDate(com.google.api.client.util.DateTime createdDate) {
  this.createdDate = createdDate;
  return this;
}

/**
 * Whether the shared drive is hidden from default view.
 * @return value or {@code null} for none
 */
public java.lang.Boolean getHidden() {
  return hidden;
}

/**
 * Whether the shared drive is hidden from default view.
 * @param hidden hidden or {@code null} for none
 */
public Drive setHidden(java.lang.Boolean hidden) {
  this.hidden = hidden;
  return this;
}

/**
 * The ID of this shared drive which is also the ID of the top level folder of this shared drive.
 * @return value or {@code null} for none
 */
public java.lang.String getId() {
  return id;
}

/**
 * The ID of this shared drive which is also the ID of the top level folder of this shared drive.
 * @param id id or {@code null} for none
 */
public Drive setId(java.lang.String id) {
  this.id = id;
  return this;
}

/**
 * This is always drive#drive
 * @return value or {@code null} for none
 */
public java.lang.String getKind() {
  return kind;
}

/**
 * This is always drive#drive
 * @param kind kind or {@code null} for none
 */
public Drive setKind(java.lang.String kind) {
  this.kind = kind;
  return this;
}

/**
 * The name of this shared drive.
 * @return value or {@code null} for none
 */
public java.lang.String getName() {
  return name;
}

/**
 * The name of this shared drive.
 * @param name name or {@code null} for none
 */
public Drive setName(java.lang.String name) {
  this.name = name;
  return this;
}

/**
 * A set of restrictions that apply to this shared drive or items inside this shared drive.
 * @return value or {@code null} for none
 */
public Restrictions getRestrictions() {
  return restrictions;
}

/**
 * A set of restrictions that apply to this shared drive or items inside this shared drive.
 * @param restrictions restrictions or {@code null} for none
 */
public Drive setRestrictions(Restrictions restrictions) {
  this.restrictions = restrictions;
  return this;
}

/**
 * The ID of the theme from which the background image and color will be set. The set of possible
 * driveThemes can be retrieved from a drive.about.get response. When not specified on a
 * drive.drives.insert request, a random theme is chosen from which the background image and color
 * are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
 * backgroundImageFile.
 * @return value or {@code null} for none
 */
public java.lang.String getThemeId() {
  return themeId;
}

/**
 * The ID of the theme from which the background image and color will be set. The set of possible
 * driveThemes can be retrieved from a drive.about.get response. When not specified on a
 * drive.drives.insert request, a random theme is chosen from which the background image and color
 * are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
 * backgroundImageFile.
 * @param themeId themeId or {@code null} for none
 */
public Drive setThemeId(java.lang.String themeId) {
  this.themeId = themeId;
  return this;
}

// Covariant overrides so chained generic-map access keeps the Drive type.
@Override
public Drive set(String fieldName, Object value) {
  return (Drive) super.set(fieldName, value);
}

@Override
public Drive clone() {
  return (Drive) super.clone();
}
/**
 * An image file and cropping parameters from which a background image for this shared drive is set.
 * This is a write only field; it can only be set on drive.drives.update requests that don't set
 * themeId. When specified, all fields of the backgroundImageFile must be set.
 */
public static final class BackgroundImageFile extends com.google.api.client.json.GenericJson {
  // JSON-mapped fields; all may be null when omitted by the server.
  /**
   * The ID of an image file in Google Drive to use for the background image.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String id;

  /**
   * The width of the cropped image in the closed range of 0 to 1. This value represents the width
   * of the cropped image divided by the width of the entire image. The height is computed by
   * applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
   * pixels wide and 144 pixels high.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Float width;

  /**
   * The X coordinate of the upper left corner of the cropping area in the background image. This is
   * a value in the closed range of 0 to 1. This value represents the horizontal distance from the
   * left side of the entire image to the left side of the cropping area divided by the width of the
   * entire image.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Float xCoordinate;

  /**
   * The Y coordinate of the upper left corner of the cropping area in the background image. This is
   * a value in the closed range of 0 to 1. This value represents the vertical distance from the top
   * side of the entire image to the top side of the cropping area divided by the height of the
   * entire image.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Float yCoordinate;

  /**
   * The ID of an image file in Google Drive to use for the background image.
   * @return value or {@code null} for none
   */
  public java.lang.String getId() {
    return id;
  }

  /**
   * The ID of an image file in Google Drive to use for the background image.
   * @param id id or {@code null} for none
   */
  public BackgroundImageFile setId(java.lang.String id) {
    this.id = id;
    return this;
  }

  /**
   * The width of the cropped image in the closed range of 0 to 1. This value represents the width
   * of the cropped image divided by the width of the entire image. The height is computed by
   * applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
   * pixels wide and 144 pixels high.
   * @return value or {@code null} for none
   */
  public java.lang.Float getWidth() {
    return width;
  }

  /**
   * The width of the cropped image in the closed range of 0 to 1. This value represents the width
   * of the cropped image divided by the width of the entire image. The height is computed by
   * applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
   * pixels wide and 144 pixels high.
   * @param width width or {@code null} for none
   */
  public BackgroundImageFile setWidth(java.lang.Float width) {
    this.width = width;
    return this;
  }

  /**
   * The X coordinate of the upper left corner of the cropping area in the background image. This is
   * a value in the closed range of 0 to 1. This value represents the horizontal distance from the
   * left side of the entire image to the left side of the cropping area divided by the width of the
   * entire image.
   * @return value or {@code null} for none
   */
  public java.lang.Float getXCoordinate() {
    return xCoordinate;
  }

  /**
   * The X coordinate of the upper left corner of the cropping area in the background image. This is
   * a value in the closed range of 0 to 1. This value represents the horizontal distance from the
   * left side of the entire image to the left side of the cropping area divided by the width of the
   * entire image.
   * @param xCoordinate xCoordinate or {@code null} for none
   */
  public BackgroundImageFile setXCoordinate(java.lang.Float xCoordinate) {
    this.xCoordinate = xCoordinate;
    return this;
  }

  /**
   * The Y coordinate of the upper left corner of the cropping area in the background image. This is
   * a value in the closed range of 0 to 1. This value represents the vertical distance from the top
   * side of the entire image to the top side of the cropping area divided by the height of the
   * entire image.
   * @return value or {@code null} for none
   */
  public java.lang.Float getYCoordinate() {
    return yCoordinate;
  }

  /**
   * The Y coordinate of the upper left corner of the cropping area in the background image. This is
   * a value in the closed range of 0 to 1. This value represents the vertical distance from the top
   * side of the entire image to the top side of the cropping area divided by the height of the
   * entire image.
   * @param yCoordinate yCoordinate or {@code null} for none
   */
  public BackgroundImageFile setYCoordinate(java.lang.Float yCoordinate) {
    this.yCoordinate = yCoordinate;
    return this;
  }

  // Covariant overrides so chained generic-map access keeps the BackgroundImageFile type.
  @Override
  public BackgroundImageFile set(String fieldName, Object value) {
    return (BackgroundImageFile) super.set(fieldName, value);
  }

  @Override
  public BackgroundImageFile clone() {
    return (BackgroundImageFile) super.clone();
  }
}
/**
* Capabilities the current user has on this shared drive.
*/
public static final class Capabilities extends com.google.api.client.json.GenericJson {
// JSON-mapped capability flags; all may be null when omitted by the server.
/**
 * Whether the current user can add children to folders in this shared drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean canAddChildren;

/**
 * Whether the current user can change the copyRequiresWriterPermission restriction of this shared
 * drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction;

/**
 * Whether the current user can change the domainUsersOnly restriction of this shared drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean canChangeDomainUsersOnlyRestriction;

/**
 * Whether the current user can change the background of this shared drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean canChangeDriveBackground;

/**
 * Whether the current user can change the driveMembersOnly restriction of this shared drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean canChangeDriveMembersOnlyRestriction;

/**
 * Whether the current user can comment on files in this shared drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean canComment;

/**
 * Whether the current user can copy files in this shared drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean canCopy;

/**
 * Whether the current user can delete children from folders in this shared drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean canDeleteChildren;

/**
 * Whether the current user can delete this shared drive. Attempting to delete the shared drive
 * may still fail if there are untrashed items inside the shared drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean canDeleteDrive;

/**
 * Whether the current user can download files in this shared drive.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean canDownload;

/**
 * Whether the current user can edit files in this shared drive
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean canEdit;
/**
* Whether the current user can list the children of folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canListChildren;
/**
* Whether the current user can add members to this shared drive or remove them or change their
* role.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canManageMembers;
/**
* Whether the current user can read the revisions resource of files in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canReadRevisions;
/**
* Whether the current user can rename files or folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canRename;
/**
* Whether the current user can rename this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canRenameDrive;
/**
* Whether the current user can share files or folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canShare;
/**
* Whether the current user can trash children from folders in this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean canTrashChildren;
/**
* Whether the current user can add children to folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanAddChildren() {
return canAddChildren;
}
/**
* Whether the current user can add children to folders in this shared drive.
* @param canAddChildren canAddChildren or {@code null} for none
*/
public Capabilities setCanAddChildren(java.lang.Boolean canAddChildren) {
this.canAddChildren = canAddChildren;
return this;
}
/**
* Whether the current user can change the copyRequiresWriterPermission restriction of this shared
* drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanChangeCopyRequiresWriterPermissionRestriction() {
return canChangeCopyRequiresWriterPermissionRestriction;
}
/**
* Whether the current user can change the copyRequiresWriterPermission restriction of this shared
* drive.
* @param canChangeCopyRequiresWriterPermissionRestriction canChangeCopyRequiresWriterPermissionRestriction or {@code null} for none
*/
public Capabilities setCanChangeCopyRequiresWriterPermissionRestriction(java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction) {
this.canChangeCopyRequiresWriterPermissionRestriction = canChangeCopyRequiresWriterPermissionRestriction;
return this;
}
/**
* Whether the current user can change the domainUsersOnly restriction of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanChangeDomainUsersOnlyRestriction() {
return canChangeDomainUsersOnlyRestriction;
}
/**
* Whether the current user can change the domainUsersOnly restriction of this shared drive.
* @param canChangeDomainUsersOnlyRestriction canChangeDomainUsersOnlyRestriction or {@code null} for none
*/
public Capabilities setCanChangeDomainUsersOnlyRestriction(java.lang.Boolean canChangeDomainUsersOnlyRestriction) {
this.canChangeDomainUsersOnlyRestriction = canChangeDomainUsersOnlyRestriction;
return this;
}
/**
* Whether the current user can change the background of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanChangeDriveBackground() {
return canChangeDriveBackground;
}
/**
* Whether the current user can change the background of this shared drive.
* @param canChangeDriveBackground canChangeDriveBackground or {@code null} for none
*/
public Capabilities setCanChangeDriveBackground(java.lang.Boolean canChangeDriveBackground) {
this.canChangeDriveBackground = canChangeDriveBackground;
return this;
}
/**
* Whether the current user can change the driveMembersOnly restriction of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanChangeDriveMembersOnlyRestriction() {
return canChangeDriveMembersOnlyRestriction;
}
/**
* Whether the current user can change the driveMembersOnly restriction of this shared drive.
* @param canChangeDriveMembersOnlyRestriction canChangeDriveMembersOnlyRestriction or {@code null} for none
*/
public Capabilities setCanChangeDriveMembersOnlyRestriction(java.lang.Boolean canChangeDriveMembersOnlyRestriction) {
this.canChangeDriveMembersOnlyRestriction = canChangeDriveMembersOnlyRestriction;
return this;
}
/**
* Whether the current user can comment on files in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanComment() {
return canComment;
}
/**
* Whether the current user can comment on files in this shared drive.
* @param canComment canComment or {@code null} for none
*/
public Capabilities setCanComment(java.lang.Boolean canComment) {
this.canComment = canComment;
return this;
}
/**
* Whether the current user can copy files in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanCopy() {
return canCopy;
}
/**
* Whether the current user can copy files in this shared drive.
* @param canCopy canCopy or {@code null} for none
*/
public Capabilities setCanCopy(java.lang.Boolean canCopy) {
this.canCopy = canCopy;
return this;
}
/**
* Whether the current user can delete children from folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanDeleteChildren() {
return canDeleteChildren;
}
/**
* Whether the current user can delete children from folders in this shared drive.
* @param canDeleteChildren canDeleteChildren or {@code null} for none
*/
public Capabilities setCanDeleteChildren(java.lang.Boolean canDeleteChildren) {
this.canDeleteChildren = canDeleteChildren;
return this;
}
/**
* Whether the current user can delete this shared drive. Attempting to delete the shared drive
* may still fail if there are untrashed items inside the shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanDeleteDrive() {
return canDeleteDrive;
}
/**
* Whether the current user can delete this shared drive. Attempting to delete the shared drive
* may still fail if there are untrashed items inside the shared drive.
* @param canDeleteDrive canDeleteDrive or {@code null} for none
*/
public Capabilities setCanDeleteDrive(java.lang.Boolean canDeleteDrive) {
this.canDeleteDrive = canDeleteDrive;
return this;
}
/**
* Whether the current user can download files in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanDownload() {
return canDownload;
}
/**
* Whether the current user can download files in this shared drive.
* @param canDownload canDownload or {@code null} for none
*/
public Capabilities setCanDownload(java.lang.Boolean canDownload) {
this.canDownload = canDownload;
return this;
}
/**
* Whether the current user can edit files in this shared drive
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanEdit() {
return canEdit;
}
/**
* Whether the current user can edit files in this shared drive
* @param canEdit canEdit or {@code null} for none
*/
public Capabilities setCanEdit(java.lang.Boolean canEdit) {
this.canEdit = canEdit;
return this;
}
/**
* Whether the current user can list the children of folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanListChildren() {
return canListChildren;
}
/**
* Whether the current user can list the children of folders in this shared drive.
* @param canListChildren canListChildren or {@code null} for none
*/
public Capabilities setCanListChildren(java.lang.Boolean canListChildren) {
this.canListChildren = canListChildren;
return this;
}
/**
* Whether the current user can add members to this shared drive or remove them or change their
* role.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanManageMembers() {
return canManageMembers;
}
/**
* Whether the current user can add members to this shared drive or remove them or change their
* role.
* @param canManageMembers canManageMembers or {@code null} for none
*/
public Capabilities setCanManageMembers(java.lang.Boolean canManageMembers) {
this.canManageMembers = canManageMembers;
return this;
}
/**
* Whether the current user can read the revisions resource of files in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanReadRevisions() {
return canReadRevisions;
}
/**
* Whether the current user can read the revisions resource of files in this shared drive.
* @param canReadRevisions canReadRevisions or {@code null} for none
*/
public Capabilities setCanReadRevisions(java.lang.Boolean canReadRevisions) {
this.canReadRevisions = canReadRevisions;
return this;
}
/**
* Whether the current user can rename files or folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanRename() {
return canRename;
}
/**
* Whether the current user can rename files or folders in this shared drive.
* @param canRename canRename or {@code null} for none
*/
public Capabilities setCanRename(java.lang.Boolean canRename) {
this.canRename = canRename;
return this;
}
/**
* Whether the current user can rename this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanRenameDrive() {
return canRenameDrive;
}
/**
* Whether the current user can rename this shared drive.
* @param canRenameDrive canRenameDrive or {@code null} for none
*/
public Capabilities setCanRenameDrive(java.lang.Boolean canRenameDrive) {
this.canRenameDrive = canRenameDrive;
return this;
}
/**
* Whether the current user can share files or folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanShare() {
return canShare;
}
/**
* Whether the current user can share files or folders in this shared drive.
* @param canShare canShare or {@code null} for none
*/
public Capabilities setCanShare(java.lang.Boolean canShare) {
this.canShare = canShare;
return this;
}
/**
* Whether the current user can trash children from folders in this shared drive.
* @return value or {@code null} for none
*/
public java.lang.Boolean getCanTrashChildren() {
return canTrashChildren;
}
/**
* Whether the current user can trash children from folders in this shared drive.
* @param canTrashChildren canTrashChildren or {@code null} for none
*/
public Capabilities setCanTrashChildren(java.lang.Boolean canTrashChildren) {
this.canTrashChildren = canTrashChildren;
return this;
}
@Override
public Capabilities set(String fieldName, Object value) {
return (Capabilities) super.set(fieldName, value);
}
@Override
public Capabilities clone() {
return (Capabilities) super.clone();
}
}
/**
 * A set of restrictions that apply to this shared drive or items inside this shared drive.
 *
 * <p>Each restriction is a tri-state {@code Boolean} ({@code null} = absent from the JSON
 * payload). Every field is declared next to its getter and its fluent setter, which returns
 * {@code this} so calls can be chained.
 */
public static final class Restrictions extends com.google.api.client.json.GenericJson {

  /** Whether administrative privileges on this shared drive are required to modify restrictions. */
  @com.google.api.client.util.Key
  private java.lang.Boolean adminManagedRestrictions;

  public java.lang.Boolean getAdminManagedRestrictions() { return adminManagedRestrictions; }

  public Restrictions setAdminManagedRestrictions(java.lang.Boolean adminManagedRestrictions) { this.adminManagedRestrictions = adminManagedRestrictions; return this; }

  /**
   * Whether the options to copy, print, or download files inside this shared drive should be
   * disabled for readers and commenters. When set to true, this overrides the similarly named
   * field for every file inside this shared drive.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean copyRequiresWriterPermission;

  public java.lang.Boolean getCopyRequiresWriterPermission() { return copyRequiresWriterPermission; }

  public Restrictions setCopyRequiresWriterPermission(java.lang.Boolean copyRequiresWriterPermission) { this.copyRequiresWriterPermission = copyRequiresWriterPermission; return this; }

  /**
   * Whether access to this shared drive and items inside it is restricted to users of the
   * domain to which this shared drive belongs. This restriction may be overridden by other
   * sharing policies controlled outside of this shared drive.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean domainUsersOnly;

  public java.lang.Boolean getDomainUsersOnly() { return domainUsersOnly; }

  public Restrictions setDomainUsersOnly(java.lang.Boolean domainUsersOnly) { this.domainUsersOnly = domainUsersOnly; return this; }

  /** Whether access to items inside this shared drive is restricted to its members. */
  @com.google.api.client.util.Key
  private java.lang.Boolean driveMembersOnly;

  public java.lang.Boolean getDriveMembersOnly() { return driveMembersOnly; }

  public Restrictions setDriveMembersOnly(java.lang.Boolean driveMembersOnly) { this.driveMembersOnly = driveMembersOnly; return this; }

  /** Sets an arbitrary field by its JSON name, narrowing the return type for chaining. */
  @Override
  public Restrictions set(String fieldName, Object value) { return (Restrictions) super.set(fieldName, value); }

  /** Returns a copy of this instance. */
  @Override
  public Restrictions clone() { return (Restrictions) super.clone(); }
}
}
|
googleapis/google-api-java-client-services | 36,252 | clients/google-api-services-drive/v2/1.27.0/com/google/api/services/drive/model/Drive.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.drive.model;
/**
* Representation of a shared drive.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Drive API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Drive extends com.google.api.client.json.GenericJson {
/**
 * An image file and cropping parameters from which a background image for this shared drive
 * is set. Write-only: may only be set on drive.drives.update requests that don't set themeId,
 * and all of its fields must then be set. May be {@code null}.
 */
@com.google.api.client.util.Key
private BackgroundImageFile backgroundImageFile;

/** A short-lived link to this shared drive's background image. May be {@code null}. */
@com.google.api.client.util.Key
private java.lang.String backgroundImageLink;

/** Capabilities the current user has on this shared drive. May be {@code null}. */
@com.google.api.client.util.Key
private Capabilities capabilities;

/**
 * The color of this shared drive as an RGB hex string. It can only be set on a
 * drive.drives.update request that does not set themeId. May be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String colorRgb;

/** The time at which the shared drive was created (RFC 3339 date-time). May be {@code null}. */
@com.google.api.client.util.Key
private com.google.api.client.util.DateTime createdDate;

/** Whether the shared drive is hidden from default view. May be {@code null}. */
@com.google.api.client.util.Key
private java.lang.Boolean hidden;

/**
 * The ID of this shared drive, which is also the ID of the top-level folder of this shared
 * drive. May be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String id;

/** Identifies the resource type; always the fixed string drive#drive. May be {@code null}. */
@com.google.api.client.util.Key
private java.lang.String kind;

/** The name of this shared drive. May be {@code null}. */
@com.google.api.client.util.Key
private java.lang.String name;

/** A set of restrictions that apply to this shared drive or items inside it. May be {@code null}. */
@com.google.api.client.util.Key
private Restrictions restrictions;

/**
 * The ID of the theme from which the background image and color will be set. The set of
 * possible driveThemes can be retrieved from a drive.about.get response. When not specified on
 * a drive.drives.insert request, a random theme is chosen. Write-only: can only be set on
 * requests that don't set colorRgb or backgroundImageFile. May be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String themeId;
/**
 * Returns the background image file and cropping parameters, or {@code null} for none.
 * This is a write-only field; see {@link #setBackgroundImageFile}.
 */
public BackgroundImageFile getBackgroundImageFile() { return backgroundImageFile; }

/**
 * Sets the background image file and cropping parameters. Write-only: may only be set on
 * drive.drives.update requests that don't set themeId, and all of its fields must then be set.
 *
 * @param backgroundImageFile the image file, or {@code null} for none
 * @return this object, for call chaining
 */
public Drive setBackgroundImageFile(BackgroundImageFile backgroundImageFile) {
  this.backgroundImageFile = backgroundImageFile;
  return this;
}

/** Returns the short-lived link to this shared drive's background image, or {@code null}. */
public java.lang.String getBackgroundImageLink() { return backgroundImageLink; }

/** Sets the short-lived background image link; returns {@code this} for chaining. */
public Drive setBackgroundImageLink(java.lang.String backgroundImageLink) { this.backgroundImageLink = backgroundImageLink; return this; }

/** Returns the capabilities the current user has on this shared drive, or {@code null}. */
public Capabilities getCapabilities() { return capabilities; }

/** Sets the current user's capabilities; returns {@code this} for chaining. */
public Drive setCapabilities(Capabilities capabilities) { this.capabilities = capabilities; return this; }

/**
 * Returns the color of this shared drive as an RGB hex string, or {@code null}. Can only be
 * set on a drive.drives.update request that does not set themeId.
 */
public java.lang.String getColorRgb() { return colorRgb; }

/** Sets the RGB hex color string; returns {@code this} for chaining. */
public Drive setColorRgb(java.lang.String colorRgb) { this.colorRgb = colorRgb; return this; }
/** Returns the time at which the shared drive was created (RFC 3339 date-time), or {@code null}. */
public com.google.api.client.util.DateTime getCreatedDate() { return createdDate; }

/** Sets the creation time (RFC 3339 date-time); returns {@code this} for chaining. */
public Drive setCreatedDate(com.google.api.client.util.DateTime createdDate) { this.createdDate = createdDate; return this; }

/** Returns whether the shared drive is hidden from default view, or {@code null}. */
public java.lang.Boolean getHidden() { return hidden; }

/** Sets whether the shared drive is hidden from default view; returns {@code this} for chaining. */
public Drive setHidden(java.lang.Boolean hidden) { this.hidden = hidden; return this; }

/** Returns the ID of this shared drive (also the ID of its top-level folder), or {@code null}. */
public java.lang.String getId() { return id; }

/** Sets the shared drive ID; returns {@code this} for chaining. */
public Drive setId(java.lang.String id) { this.id = id; return this; }

/** Returns the resource kind (always drive#drive), or {@code null}. */
public java.lang.String getKind() { return kind; }

/** Sets the resource kind; returns {@code this} for chaining. */
public Drive setKind(java.lang.String kind) { this.kind = kind; return this; }

/** Returns the name of this shared drive, or {@code null}. */
public java.lang.String getName() { return name; }

/** Sets the name of this shared drive; returns {@code this} for chaining. */
public Drive setName(java.lang.String name) { this.name = name; return this; }
/** Returns the restrictions applying to this shared drive or items inside it, or {@code null}. */
public Restrictions getRestrictions() { return restrictions; }

/** Sets the restrictions; returns {@code this} for chaining. */
public Drive setRestrictions(Restrictions restrictions) { this.restrictions = restrictions; return this; }

/**
 * Returns the ID of the theme from which the background image and color will be set, or
 * {@code null}. The set of possible driveThemes can be retrieved from a drive.about.get
 * response.
 */
public java.lang.String getThemeId() { return themeId; }

/**
 * Sets the theme ID. Write-only: can only be set on requests that don't set colorRgb or
 * backgroundImageFile; when not specified on a drive.drives.insert request, a random theme is
 * chosen.
 *
 * @param themeId the theme ID, or {@code null} for none
 * @return this object, for call chaining
 */
public Drive setThemeId(java.lang.String themeId) { this.themeId = themeId; return this; }

/** Sets an arbitrary field by its JSON name, narrowing the return type for chaining. */
@Override
public Drive set(String fieldName, Object value) { return (Drive) super.set(fieldName, value); }

/** Returns a copy of this instance. */
@Override
public Drive clone() { return (Drive) super.clone(); }
/**
 * An image file and cropping parameters from which a background image for this shared drive
 * is set. Write-only: can only be set on drive.drives.update requests that don't set themeId,
 * and all fields must then be set.
 *
 * <p>The crop is described by coordinates in the closed range [0, 1], expressed as fractions
 * of the full image's width/height. Each field is declared next to its getter (returning the
 * value or {@code null}) and its fluent setter (returning {@code this} for chaining).
 */
public static final class BackgroundImageFile extends com.google.api.client.json.GenericJson {

  /** The ID of an image file in Google Drive to use for the background image. */
  @com.google.api.client.util.Key
  private java.lang.String id;

  /**
   * The width of the cropped image as a fraction ([0, 1]) of the full image width. The height
   * is computed by applying a width-to-height aspect ratio of 80 to 9; the resulting image
   * must be at least 1280 pixels wide and 144 pixels high.
   */
  @com.google.api.client.util.Key
  private java.lang.Float width;

  /**
   * The X coordinate of the crop's upper-left corner, as a fraction ([0, 1]) of the full image
   * width measured from the left edge.
   */
  @com.google.api.client.util.Key
  private java.lang.Float xCoordinate;

  /**
   * The Y coordinate of the crop's upper-left corner, as a fraction ([0, 1]) of the full image
   * height measured from the top edge.
   */
  @com.google.api.client.util.Key
  private java.lang.Float yCoordinate;

  public java.lang.String getId() { return id; }

  public BackgroundImageFile setId(java.lang.String id) { this.id = id; return this; }

  public java.lang.Float getWidth() { return width; }

  public BackgroundImageFile setWidth(java.lang.Float width) { this.width = width; return this; }

  public java.lang.Float getXCoordinate() { return xCoordinate; }

  public BackgroundImageFile setXCoordinate(java.lang.Float xCoordinate) { this.xCoordinate = xCoordinate; return this; }

  public java.lang.Float getYCoordinate() { return yCoordinate; }

  public BackgroundImageFile setYCoordinate(java.lang.Float yCoordinate) { this.yCoordinate = yCoordinate; return this; }

  /** Sets an arbitrary field by its JSON name, narrowing the return type for chaining. */
  @Override
  public BackgroundImageFile set(String fieldName, Object value) { return (BackgroundImageFile) super.set(fieldName, value); }

  /** Returns a copy of this instance. */
  @Override
  public BackgroundImageFile clone() { return (BackgroundImageFile) super.clone(); }
}
  /**
   * Capabilities the current user has on this shared drive. Each capability is a {@code Boolean}
   * that may be {@code null} when not populated by the server.
   */
  public static final class Capabilities extends com.google.api.client.json.GenericJson {
    /**
     * Whether the current user can add children to folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canAddChildren;
    /**
     * Whether the current user can change the copyRequiresWriterPermission restriction of this shared
     * drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction;
    /**
     * Whether the current user can change the domainUsersOnly restriction of this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeDomainUsersOnlyRestriction;
    /**
     * Whether the current user can change the background of this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeDriveBackground;
    /**
     * Whether the current user can change the driveMembersOnly restriction of this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeDriveMembersOnlyRestriction;
    /**
     * Whether the current user can comment on files in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canComment;
    /**
     * Whether the current user can copy files in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canCopy;
    /**
     * Whether the current user can delete children from folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canDeleteChildren;
    /**
     * Whether the current user can delete this shared drive. Attempting to delete the shared drive
     * may still fail if there are untrashed items inside the shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canDeleteDrive;
    /**
     * Whether the current user can download files in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canDownload;
    /**
     * Whether the current user can edit files in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canEdit;
    /**
     * Whether the current user can list the children of folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canListChildren;
    /**
     * Whether the current user can add members to this shared drive or remove them or change their
     * role.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canManageMembers;
    /**
     * Whether the current user can read the revisions resource of files in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canReadRevisions;
    /**
     * Whether the current user can rename files or folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canRename;
    /**
     * Whether the current user can rename this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canRenameDrive;
    /**
     * Whether the current user can share files or folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canShare;
    /**
     * Whether the current user can trash children from folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canTrashChildren;
    /**
     * Whether the current user can add children to folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanAddChildren() {
      return canAddChildren;
    }
    /**
     * Whether the current user can add children to folders in this shared drive.
     * @param canAddChildren canAddChildren or {@code null} for none
     */
    public Capabilities setCanAddChildren(java.lang.Boolean canAddChildren) {
      this.canAddChildren = canAddChildren;
      return this;
    }
    /**
     * Whether the current user can change the copyRequiresWriterPermission restriction of this shared
     * drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeCopyRequiresWriterPermissionRestriction() {
      return canChangeCopyRequiresWriterPermissionRestriction;
    }
    /**
     * Whether the current user can change the copyRequiresWriterPermission restriction of this shared
     * drive.
     * @param canChangeCopyRequiresWriterPermissionRestriction canChangeCopyRequiresWriterPermissionRestriction or {@code null} for none
     */
    public Capabilities setCanChangeCopyRequiresWriterPermissionRestriction(java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction) {
      this.canChangeCopyRequiresWriterPermissionRestriction = canChangeCopyRequiresWriterPermissionRestriction;
      return this;
    }
    /**
     * Whether the current user can change the domainUsersOnly restriction of this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeDomainUsersOnlyRestriction() {
      return canChangeDomainUsersOnlyRestriction;
    }
    /**
     * Whether the current user can change the domainUsersOnly restriction of this shared drive.
     * @param canChangeDomainUsersOnlyRestriction canChangeDomainUsersOnlyRestriction or {@code null} for none
     */
    public Capabilities setCanChangeDomainUsersOnlyRestriction(java.lang.Boolean canChangeDomainUsersOnlyRestriction) {
      this.canChangeDomainUsersOnlyRestriction = canChangeDomainUsersOnlyRestriction;
      return this;
    }
    /**
     * Whether the current user can change the background of this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeDriveBackground() {
      return canChangeDriveBackground;
    }
    /**
     * Whether the current user can change the background of this shared drive.
     * @param canChangeDriveBackground canChangeDriveBackground or {@code null} for none
     */
    public Capabilities setCanChangeDriveBackground(java.lang.Boolean canChangeDriveBackground) {
      this.canChangeDriveBackground = canChangeDriveBackground;
      return this;
    }
    /**
     * Whether the current user can change the driveMembersOnly restriction of this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeDriveMembersOnlyRestriction() {
      return canChangeDriveMembersOnlyRestriction;
    }
    /**
     * Whether the current user can change the driveMembersOnly restriction of this shared drive.
     * @param canChangeDriveMembersOnlyRestriction canChangeDriveMembersOnlyRestriction or {@code null} for none
     */
    public Capabilities setCanChangeDriveMembersOnlyRestriction(java.lang.Boolean canChangeDriveMembersOnlyRestriction) {
      this.canChangeDriveMembersOnlyRestriction = canChangeDriveMembersOnlyRestriction;
      return this;
    }
    /**
     * Whether the current user can comment on files in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanComment() {
      return canComment;
    }
    /**
     * Whether the current user can comment on files in this shared drive.
     * @param canComment canComment or {@code null} for none
     */
    public Capabilities setCanComment(java.lang.Boolean canComment) {
      this.canComment = canComment;
      return this;
    }
    /**
     * Whether the current user can copy files in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanCopy() {
      return canCopy;
    }
    /**
     * Whether the current user can copy files in this shared drive.
     * @param canCopy canCopy or {@code null} for none
     */
    public Capabilities setCanCopy(java.lang.Boolean canCopy) {
      this.canCopy = canCopy;
      return this;
    }
    /**
     * Whether the current user can delete children from folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanDeleteChildren() {
      return canDeleteChildren;
    }
    /**
     * Whether the current user can delete children from folders in this shared drive.
     * @param canDeleteChildren canDeleteChildren or {@code null} for none
     */
    public Capabilities setCanDeleteChildren(java.lang.Boolean canDeleteChildren) {
      this.canDeleteChildren = canDeleteChildren;
      return this;
    }
    /**
     * Whether the current user can delete this shared drive. Attempting to delete the shared drive
     * may still fail if there are untrashed items inside the shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanDeleteDrive() {
      return canDeleteDrive;
    }
    /**
     * Whether the current user can delete this shared drive. Attempting to delete the shared drive
     * may still fail if there are untrashed items inside the shared drive.
     * @param canDeleteDrive canDeleteDrive or {@code null} for none
     */
    public Capabilities setCanDeleteDrive(java.lang.Boolean canDeleteDrive) {
      this.canDeleteDrive = canDeleteDrive;
      return this;
    }
    /**
     * Whether the current user can download files in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanDownload() {
      return canDownload;
    }
    /**
     * Whether the current user can download files in this shared drive.
     * @param canDownload canDownload or {@code null} for none
     */
    public Capabilities setCanDownload(java.lang.Boolean canDownload) {
      this.canDownload = canDownload;
      return this;
    }
    /**
     * Whether the current user can edit files in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanEdit() {
      return canEdit;
    }
    /**
     * Whether the current user can edit files in this shared drive.
     * @param canEdit canEdit or {@code null} for none
     */
    public Capabilities setCanEdit(java.lang.Boolean canEdit) {
      this.canEdit = canEdit;
      return this;
    }
    /**
     * Whether the current user can list the children of folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanListChildren() {
      return canListChildren;
    }
    /**
     * Whether the current user can list the children of folders in this shared drive.
     * @param canListChildren canListChildren or {@code null} for none
     */
    public Capabilities setCanListChildren(java.lang.Boolean canListChildren) {
      this.canListChildren = canListChildren;
      return this;
    }
    /**
     * Whether the current user can add members to this shared drive or remove them or change their
     * role.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanManageMembers() {
      return canManageMembers;
    }
    /**
     * Whether the current user can add members to this shared drive or remove them or change their
     * role.
     * @param canManageMembers canManageMembers or {@code null} for none
     */
    public Capabilities setCanManageMembers(java.lang.Boolean canManageMembers) {
      this.canManageMembers = canManageMembers;
      return this;
    }
    /**
     * Whether the current user can read the revisions resource of files in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanReadRevisions() {
      return canReadRevisions;
    }
    /**
     * Whether the current user can read the revisions resource of files in this shared drive.
     * @param canReadRevisions canReadRevisions or {@code null} for none
     */
    public Capabilities setCanReadRevisions(java.lang.Boolean canReadRevisions) {
      this.canReadRevisions = canReadRevisions;
      return this;
    }
    /**
     * Whether the current user can rename files or folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanRename() {
      return canRename;
    }
    /**
     * Whether the current user can rename files or folders in this shared drive.
     * @param canRename canRename or {@code null} for none
     */
    public Capabilities setCanRename(java.lang.Boolean canRename) {
      this.canRename = canRename;
      return this;
    }
    /**
     * Whether the current user can rename this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanRenameDrive() {
      return canRenameDrive;
    }
    /**
     * Whether the current user can rename this shared drive.
     * @param canRenameDrive canRenameDrive or {@code null} for none
     */
    public Capabilities setCanRenameDrive(java.lang.Boolean canRenameDrive) {
      this.canRenameDrive = canRenameDrive;
      return this;
    }
    /**
     * Whether the current user can share files or folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanShare() {
      return canShare;
    }
    /**
     * Whether the current user can share files or folders in this shared drive.
     * @param canShare canShare or {@code null} for none
     */
    public Capabilities setCanShare(java.lang.Boolean canShare) {
      this.canShare = canShare;
      return this;
    }
    /**
     * Whether the current user can trash children from folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanTrashChildren() {
      return canTrashChildren;
    }
    /**
     * Whether the current user can trash children from folders in this shared drive.
     * @param canTrashChildren canTrashChildren or {@code null} for none
     */
    public Capabilities setCanTrashChildren(java.lang.Boolean canTrashChildren) {
      this.canTrashChildren = canTrashChildren;
      return this;
    }
    /**
     * Sets a generic JSON field by name; overridden only to narrow the return type for chaining.
     */
    @Override
    public Capabilities set(String fieldName, Object value) {
      return (Capabilities) super.set(fieldName, value);
    }
    /**
     * Returns a copy of this object; overridden only to narrow the return type.
     */
    @Override
    public Capabilities clone() {
      return (Capabilities) super.clone();
    }
  }
  /**
   * A set of restrictions that apply to this shared drive or items inside this shared drive.
   * Each restriction is a {@code Boolean} that may be {@code null} when not set.
   */
  public static final class Restrictions extends com.google.api.client.json.GenericJson {
    /**
     * Whether administrative privileges on this shared drive are required to modify restrictions.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean adminManagedRestrictions;
    /**
     * Whether the options to copy, print, or download files inside this shared drive, should be
     * disabled for readers and commenters. When this restriction is set to true, it will override the
     * similarly named field to true for any file inside this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean copyRequiresWriterPermission;
    /**
     * Whether access to this shared drive and items inside this shared drive is restricted to users
     * of the domain to which this shared drive belongs. This restriction may be overridden by other
     * sharing policies controlled outside of this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean domainUsersOnly;
    /**
     * Whether access to items inside this shared drive is restricted to its members.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean driveMembersOnly;
    /**
     * Whether administrative privileges on this shared drive are required to modify restrictions.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getAdminManagedRestrictions() {
      return adminManagedRestrictions;
    }
    /**
     * Whether administrative privileges on this shared drive are required to modify restrictions.
     * @param adminManagedRestrictions adminManagedRestrictions or {@code null} for none
     */
    public Restrictions setAdminManagedRestrictions(java.lang.Boolean adminManagedRestrictions) {
      this.adminManagedRestrictions = adminManagedRestrictions;
      return this;
    }
    /**
     * Whether the options to copy, print, or download files inside this shared drive, should be
     * disabled for readers and commenters. When this restriction is set to true, it will override the
     * similarly named field to true for any file inside this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCopyRequiresWriterPermission() {
      return copyRequiresWriterPermission;
    }
    /**
     * Whether the options to copy, print, or download files inside this shared drive, should be
     * disabled for readers and commenters. When this restriction is set to true, it will override the
     * similarly named field to true for any file inside this shared drive.
     * @param copyRequiresWriterPermission copyRequiresWriterPermission or {@code null} for none
     */
    public Restrictions setCopyRequiresWriterPermission(java.lang.Boolean copyRequiresWriterPermission) {
      this.copyRequiresWriterPermission = copyRequiresWriterPermission;
      return this;
    }
    /**
     * Whether access to this shared drive and items inside this shared drive is restricted to users
     * of the domain to which this shared drive belongs. This restriction may be overridden by other
     * sharing policies controlled outside of this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getDomainUsersOnly() {
      return domainUsersOnly;
    }
    /**
     * Whether access to this shared drive and items inside this shared drive is restricted to users
     * of the domain to which this shared drive belongs. This restriction may be overridden by other
     * sharing policies controlled outside of this shared drive.
     * @param domainUsersOnly domainUsersOnly or {@code null} for none
     */
    public Restrictions setDomainUsersOnly(java.lang.Boolean domainUsersOnly) {
      this.domainUsersOnly = domainUsersOnly;
      return this;
    }
    /**
     * Whether access to items inside this shared drive is restricted to its members.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getDriveMembersOnly() {
      return driveMembersOnly;
    }
    /**
     * Whether access to items inside this shared drive is restricted to its members.
     * @param driveMembersOnly driveMembersOnly or {@code null} for none
     */
    public Restrictions setDriveMembersOnly(java.lang.Boolean driveMembersOnly) {
      this.driveMembersOnly = driveMembersOnly;
      return this;
    }
    /**
     * Sets a generic JSON field by name; overridden only to narrow the return type for chaining.
     */
    @Override
    public Restrictions set(String fieldName, Object value) {
      return (Restrictions) super.set(fieldName, value);
    }
    /**
     * Returns a copy of this object; overridden only to narrow the return type.
     */
    @Override
    public Restrictions clone() {
      return (Restrictions) super.clone();
    }
  }
}
// ==== file boundary (concatenation artifact converted to a comment) ====
// ==== source repo: googleapis/google-api-java-client-services (36,252 bytes)
// ==== path: clients/google-api-services-drive/v2/1.28.0/com/google/api/services/drive/model/Drive.java
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.drive.model;
/**
* Representation of a shared drive.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Drive API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Drive extends com.google.api.client.json.GenericJson {
/**
* An image file and cropping parameters from which a background image for this shared drive is
* set. This is a write only field; it can only be set on drive.drives.update requests that don't
* set themeId. When specified, all fields of the backgroundImageFile must be set.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private BackgroundImageFile backgroundImageFile;
/**
* A short-lived link to this shared drive's background image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String backgroundImageLink;
/**
* Capabilities the current user has on this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Capabilities capabilities;
/**
* The color of this shared drive as an RGB hex string. It can only be set on a
* drive.drives.update request that does not set themeId.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String colorRgb;
/**
* The time at which the shared drive was created (RFC 3339 date-time).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private com.google.api.client.util.DateTime createdDate;
/**
* Whether the shared drive is hidden from default view.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean hidden;
/**
* The ID of this shared drive which is also the ID of the top level folder of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String id;
/**
* This is always drive#drive
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* The name of this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* A set of restrictions that apply to this shared drive or items inside this shared drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Restrictions restrictions;
/**
* The ID of the theme from which the background image and color will be set. The set of possible
* driveThemes can be retrieved from a drive.about.get response. When not specified on a
* drive.drives.insert request, a random theme is chosen from which the background image and color
* are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
* backgroundImageFile.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String themeId;
/**
* An image file and cropping parameters from which a background image for this shared drive is
* set. This is a write only field; it can only be set on drive.drives.update requests that don't
* set themeId. When specified, all fields of the backgroundImageFile must be set.
* @return value or {@code null} for none
*/
public BackgroundImageFile getBackgroundImageFile() {
return backgroundImageFile;
}
/**
* An image file and cropping parameters from which a background image for this shared drive is
* set. This is a write only field; it can only be set on drive.drives.update requests that don't
* set themeId. When specified, all fields of the backgroundImageFile must be set.
* @param backgroundImageFile backgroundImageFile or {@code null} for none
*/
public Drive setBackgroundImageFile(BackgroundImageFile backgroundImageFile) {
this.backgroundImageFile = backgroundImageFile;
return this;
}
/**
* A short-lived link to this shared drive's background image.
* @return value or {@code null} for none
*/
public java.lang.String getBackgroundImageLink() {
return backgroundImageLink;
}
/**
* A short-lived link to this shared drive's background image.
* @param backgroundImageLink backgroundImageLink or {@code null} for none
*/
public Drive setBackgroundImageLink(java.lang.String backgroundImageLink) {
this.backgroundImageLink = backgroundImageLink;
return this;
}
/**
* Capabilities the current user has on this shared drive.
* @return value or {@code null} for none
*/
public Capabilities getCapabilities() {
return capabilities;
}
/**
* Capabilities the current user has on this shared drive.
* @param capabilities capabilities or {@code null} for none
*/
public Drive setCapabilities(Capabilities capabilities) {
this.capabilities = capabilities;
return this;
}
/**
* The color of this shared drive as an RGB hex string. It can only be set on a
* drive.drives.update request that does not set themeId.
* @return value or {@code null} for none
*/
public java.lang.String getColorRgb() {
return colorRgb;
}
/**
* The color of this shared drive as an RGB hex string. It can only be set on a
* drive.drives.update request that does not set themeId.
* @param colorRgb colorRgb or {@code null} for none
*/
public Drive setColorRgb(java.lang.String colorRgb) {
this.colorRgb = colorRgb;
return this;
}
/**
* The time at which the shared drive was created (RFC 3339 date-time).
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getCreatedDate() {
return createdDate;
}
/**
* The time at which the shared drive was created (RFC 3339 date-time).
* @param createdDate createdDate or {@code null} for none
*/
public Drive setCreatedDate(com.google.api.client.util.DateTime createdDate) {
this.createdDate = createdDate;
return this;
}
/**
* Whether the shared drive is hidden from default view.
* @return value or {@code null} for none
*/
public java.lang.Boolean getHidden() {
return hidden;
}
/**
* Whether the shared drive is hidden from default view.
* @param hidden hidden or {@code null} for none
*/
public Drive setHidden(java.lang.Boolean hidden) {
this.hidden = hidden;
return this;
}
/**
* The ID of this shared drive which is also the ID of the top level folder of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.String getId() {
return id;
}
/**
* The ID of this shared drive which is also the ID of the top level folder of this shared drive.
* @param id id or {@code null} for none
*/
public Drive setId(java.lang.String id) {
this.id = id;
return this;
}
  /**
   * This is always drive#drive.
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }
  /**
   * This is always drive#drive.
   * @param kind kind or {@code null} for none
   */
  public Drive setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }
/**
* The name of this shared drive.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* The name of this shared drive.
* @param name name or {@code null} for none
*/
public Drive setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* A set of restrictions that apply to this shared drive or items inside this shared drive.
* @return value or {@code null} for none
*/
public Restrictions getRestrictions() {
return restrictions;
}
/**
* A set of restrictions that apply to this shared drive or items inside this shared drive.
* @param restrictions restrictions or {@code null} for none
*/
public Drive setRestrictions(Restrictions restrictions) {
this.restrictions = restrictions;
return this;
}
/**
* The ID of the theme from which the background image and color will be set. The set of possible
* driveThemes can be retrieved from a drive.about.get response. When not specified on a
* drive.drives.insert request, a random theme is chosen from which the background image and color
* are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
* backgroundImageFile.
* @return value or {@code null} for none
*/
public java.lang.String getThemeId() {
return themeId;
}
/**
* The ID of the theme from which the background image and color will be set. The set of possible
* driveThemes can be retrieved from a drive.about.get response. When not specified on a
* drive.drives.insert request, a random theme is chosen from which the background image and color
* are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
* backgroundImageFile.
* @param themeId themeId or {@code null} for none
*/
public Drive setThemeId(java.lang.String themeId) {
this.themeId = themeId;
return this;
}
  /**
   * Sets a generic JSON field by name; overridden only to narrow the return type to {@code Drive}
   * so calls can be chained fluently.
   */
  @Override
  public Drive set(String fieldName, Object value) {
    return (Drive) super.set(fieldName, value);
  }
  /**
   * Returns a copy of this object; overridden only to narrow the return type to {@code Drive}.
   */
  @Override
  public Drive clone() {
    return (Drive) super.clone();
  }
  /**
   * An image file and cropping parameters from which a background image for this shared drive is set.
   * This is a write only field; it can only be set on drive.drives.update requests that don't set
   * themeId. When specified, all fields of the backgroundImageFile must be set.
   */
  public static final class BackgroundImageFile extends com.google.api.client.json.GenericJson {
    /**
     * The ID of an image file in Google Drive to use for the background image.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String id;
    /**
     * The width of the cropped image in the closed range of 0 to 1. This value represents the width
     * of the cropped image divided by the width of the entire image. The height is computed by
     * applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
     * pixels wide and 144 pixels high. (80:9 is exactly the ratio of 1280 to 144.)
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Float width;
    /**
     * The X coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the horizontal distance from the
     * left side of the entire image to the left side of the cropping area divided by the width of the
     * entire image.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Float xCoordinate;
    /**
     * The Y coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the vertical distance from the top
     * side of the entire image to the top side of the cropping area divided by the height of the
     * entire image.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Float yCoordinate;
    /**
     * The ID of an image file in Google Drive to use for the background image.
     * @return value or {@code null} for none
     */
    public java.lang.String getId() {
      return id;
    }
    /**
     * The ID of an image file in Google Drive to use for the background image.
     * @param id id or {@code null} for none
     */
    public BackgroundImageFile setId(java.lang.String id) {
      this.id = id;
      return this;
    }
    /**
     * The width of the cropped image in the closed range of 0 to 1. This value represents the width
     * of the cropped image divided by the width of the entire image. The height is computed by
     * applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
     * pixels wide and 144 pixels high.
     * @return value or {@code null} for none
     */
    public java.lang.Float getWidth() {
      return width;
    }
    /**
     * The width of the cropped image in the closed range of 0 to 1. This value represents the width
     * of the cropped image divided by the width of the entire image. The height is computed by
     * applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
     * pixels wide and 144 pixels high.
     * @param width width or {@code null} for none
     */
    public BackgroundImageFile setWidth(java.lang.Float width) {
      this.width = width;
      return this;
    }
    /**
     * The X coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the horizontal distance from the
     * left side of the entire image to the left side of the cropping area divided by the width of the
     * entire image.
     * @return value or {@code null} for none
     */
    public java.lang.Float getXCoordinate() {
      return xCoordinate;
    }
    /**
     * The X coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the horizontal distance from the
     * left side of the entire image to the left side of the cropping area divided by the width of the
     * entire image.
     * @param xCoordinate xCoordinate or {@code null} for none
     */
    public BackgroundImageFile setXCoordinate(java.lang.Float xCoordinate) {
      this.xCoordinate = xCoordinate;
      return this;
    }
    /**
     * The Y coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the vertical distance from the top
     * side of the entire image to the top side of the cropping area divided by the height of the
     * entire image.
     * @return value or {@code null} for none
     */
    public java.lang.Float getYCoordinate() {
      return yCoordinate;
    }
    /**
     * The Y coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the vertical distance from the top
     * side of the entire image to the top side of the cropping area divided by the height of the
     * entire image.
     * @param yCoordinate yCoordinate or {@code null} for none
     */
    public BackgroundImageFile setYCoordinate(java.lang.Float yCoordinate) {
      this.yCoordinate = yCoordinate;
      return this;
    }
    /**
     * Sets a generic JSON field by name; overridden only to narrow the return type for chaining.
     */
    @Override
    public BackgroundImageFile set(String fieldName, Object value) {
      return (BackgroundImageFile) super.set(fieldName, value);
    }
    /**
     * Returns a copy of this object; overridden only to narrow the return type.
     */
    @Override
    public BackgroundImageFile clone() {
      return (BackgroundImageFile) super.clone();
    }
  }
  /**
   * Capabilities the current user has on this shared drive.
   */
  public static final class Capabilities extends com.google.api.client.json.GenericJson {
    /**
     * Whether the current user can add children to folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canAddChildren;
    /**
     * Whether the current user can change the copyRequiresWriterPermission restriction of this shared
     * drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction;
    /**
     * Whether the current user can change the domainUsersOnly restriction of this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeDomainUsersOnlyRestriction;
    /**
     * Whether the current user can change the background of this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeDriveBackground;
    /**
     * Whether the current user can change the driveMembersOnly restriction of this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeDriveMembersOnlyRestriction;
    /**
     * Whether the current user can comment on files in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canComment;
    /**
     * Whether the current user can copy files in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canCopy;
    /**
     * Whether the current user can delete children from folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canDeleteChildren;
    /**
     * Whether the current user can delete this shared drive. Attempting to delete the shared drive
     * may still fail if there are untrashed items inside the shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canDeleteDrive;
    /**
     * Whether the current user can download files in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canDownload;
    /**
     * Whether the current user can edit files in this shared drive
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canEdit;
    /**
     * Whether the current user can list the children of folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canListChildren;
    /**
     * Whether the current user can add members to this shared drive or remove them or change their
     * role.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canManageMembers;
    /**
     * Whether the current user can read the revisions resource of files in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canReadRevisions;
    /**
     * Whether the current user can rename files or folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canRename;
    /**
     * Whether the current user can rename this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canRenameDrive;
    /**
     * Whether the current user can share files or folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canShare;
    /**
     * Whether the current user can trash children from folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canTrashChildren;
    /**
     * Whether the current user can add children to folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanAddChildren() {
      return canAddChildren;
    }
    /**
     * Whether the current user can add children to folders in this shared drive.
     * @param canAddChildren canAddChildren or {@code null} for none
     */
    public Capabilities setCanAddChildren(java.lang.Boolean canAddChildren) {
      this.canAddChildren = canAddChildren;
      return this;
    }
    /**
     * Whether the current user can change the copyRequiresWriterPermission restriction of this shared
     * drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeCopyRequiresWriterPermissionRestriction() {
      return canChangeCopyRequiresWriterPermissionRestriction;
    }
    /**
     * Whether the current user can change the copyRequiresWriterPermission restriction of this shared
     * drive.
     * @param canChangeCopyRequiresWriterPermissionRestriction canChangeCopyRequiresWriterPermissionRestriction or {@code null} for none
     */
    public Capabilities setCanChangeCopyRequiresWriterPermissionRestriction(java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction) {
      this.canChangeCopyRequiresWriterPermissionRestriction = canChangeCopyRequiresWriterPermissionRestriction;
      return this;
    }
    /**
     * Whether the current user can change the domainUsersOnly restriction of this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeDomainUsersOnlyRestriction() {
      return canChangeDomainUsersOnlyRestriction;
    }
    /**
     * Whether the current user can change the domainUsersOnly restriction of this shared drive.
     * @param canChangeDomainUsersOnlyRestriction canChangeDomainUsersOnlyRestriction or {@code null} for none
     */
    public Capabilities setCanChangeDomainUsersOnlyRestriction(java.lang.Boolean canChangeDomainUsersOnlyRestriction) {
      this.canChangeDomainUsersOnlyRestriction = canChangeDomainUsersOnlyRestriction;
      return this;
    }
    /**
     * Whether the current user can change the background of this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeDriveBackground() {
      return canChangeDriveBackground;
    }
    /**
     * Whether the current user can change the background of this shared drive.
     * @param canChangeDriveBackground canChangeDriveBackground or {@code null} for none
     */
    public Capabilities setCanChangeDriveBackground(java.lang.Boolean canChangeDriveBackground) {
      this.canChangeDriveBackground = canChangeDriveBackground;
      return this;
    }
    /**
     * Whether the current user can change the driveMembersOnly restriction of this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeDriveMembersOnlyRestriction() {
      return canChangeDriveMembersOnlyRestriction;
    }
    /**
     * Whether the current user can change the driveMembersOnly restriction of this shared drive.
     * @param canChangeDriveMembersOnlyRestriction canChangeDriveMembersOnlyRestriction or {@code null} for none
     */
    public Capabilities setCanChangeDriveMembersOnlyRestriction(java.lang.Boolean canChangeDriveMembersOnlyRestriction) {
      this.canChangeDriveMembersOnlyRestriction = canChangeDriveMembersOnlyRestriction;
      return this;
    }
    /**
     * Whether the current user can comment on files in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanComment() {
      return canComment;
    }
    /**
     * Whether the current user can comment on files in this shared drive.
     * @param canComment canComment or {@code null} for none
     */
    public Capabilities setCanComment(java.lang.Boolean canComment) {
      this.canComment = canComment;
      return this;
    }
    /**
     * Whether the current user can copy files in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanCopy() {
      return canCopy;
    }
    /**
     * Whether the current user can copy files in this shared drive.
     * @param canCopy canCopy or {@code null} for none
     */
    public Capabilities setCanCopy(java.lang.Boolean canCopy) {
      this.canCopy = canCopy;
      return this;
    }
    /**
     * Whether the current user can delete children from folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanDeleteChildren() {
      return canDeleteChildren;
    }
    /**
     * Whether the current user can delete children from folders in this shared drive.
     * @param canDeleteChildren canDeleteChildren or {@code null} for none
     */
    public Capabilities setCanDeleteChildren(java.lang.Boolean canDeleteChildren) {
      this.canDeleteChildren = canDeleteChildren;
      return this;
    }
    /**
     * Whether the current user can delete this shared drive. Attempting to delete the shared drive
     * may still fail if there are untrashed items inside the shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanDeleteDrive() {
      return canDeleteDrive;
    }
    /**
     * Whether the current user can delete this shared drive. Attempting to delete the shared drive
     * may still fail if there are untrashed items inside the shared drive.
     * @param canDeleteDrive canDeleteDrive or {@code null} for none
     */
    public Capabilities setCanDeleteDrive(java.lang.Boolean canDeleteDrive) {
      this.canDeleteDrive = canDeleteDrive;
      return this;
    }
    /**
     * Whether the current user can download files in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanDownload() {
      return canDownload;
    }
    /**
     * Whether the current user can download files in this shared drive.
     * @param canDownload canDownload or {@code null} for none
     */
    public Capabilities setCanDownload(java.lang.Boolean canDownload) {
      this.canDownload = canDownload;
      return this;
    }
    /**
     * Whether the current user can edit files in this shared drive
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanEdit() {
      return canEdit;
    }
    /**
     * Whether the current user can edit files in this shared drive
     * @param canEdit canEdit or {@code null} for none
     */
    public Capabilities setCanEdit(java.lang.Boolean canEdit) {
      this.canEdit = canEdit;
      return this;
    }
    /**
     * Whether the current user can list the children of folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanListChildren() {
      return canListChildren;
    }
    /**
     * Whether the current user can list the children of folders in this shared drive.
     * @param canListChildren canListChildren or {@code null} for none
     */
    public Capabilities setCanListChildren(java.lang.Boolean canListChildren) {
      this.canListChildren = canListChildren;
      return this;
    }
    /**
     * Whether the current user can add members to this shared drive or remove them or change their
     * role.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanManageMembers() {
      return canManageMembers;
    }
    /**
     * Whether the current user can add members to this shared drive or remove them or change their
     * role.
     * @param canManageMembers canManageMembers or {@code null} for none
     */
    public Capabilities setCanManageMembers(java.lang.Boolean canManageMembers) {
      this.canManageMembers = canManageMembers;
      return this;
    }
    /**
     * Whether the current user can read the revisions resource of files in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanReadRevisions() {
      return canReadRevisions;
    }
    /**
     * Whether the current user can read the revisions resource of files in this shared drive.
     * @param canReadRevisions canReadRevisions or {@code null} for none
     */
    public Capabilities setCanReadRevisions(java.lang.Boolean canReadRevisions) {
      this.canReadRevisions = canReadRevisions;
      return this;
    }
    /**
     * Whether the current user can rename files or folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanRename() {
      return canRename;
    }
    /**
     * Whether the current user can rename files or folders in this shared drive.
     * @param canRename canRename or {@code null} for none
     */
    public Capabilities setCanRename(java.lang.Boolean canRename) {
      this.canRename = canRename;
      return this;
    }
    /**
     * Whether the current user can rename this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanRenameDrive() {
      return canRenameDrive;
    }
    /**
     * Whether the current user can rename this shared drive.
     * @param canRenameDrive canRenameDrive or {@code null} for none
     */
    public Capabilities setCanRenameDrive(java.lang.Boolean canRenameDrive) {
      this.canRenameDrive = canRenameDrive;
      return this;
    }
    /**
     * Whether the current user can share files or folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanShare() {
      return canShare;
    }
    /**
     * Whether the current user can share files or folders in this shared drive.
     * @param canShare canShare or {@code null} for none
     */
    public Capabilities setCanShare(java.lang.Boolean canShare) {
      this.canShare = canShare;
      return this;
    }
    /**
     * Whether the current user can trash children from folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanTrashChildren() {
      return canTrashChildren;
    }
    /**
     * Whether the current user can trash children from folders in this shared drive.
     * @param canTrashChildren canTrashChildren or {@code null} for none
     */
    public Capabilities setCanTrashChildren(java.lang.Boolean canTrashChildren) {
      this.canTrashChildren = canTrashChildren;
      return this;
    }
    // Covariant override: keeps the fluent return type when setting arbitrary JSON fields.
    @Override
    public Capabilities set(String fieldName, Object value) {
      return (Capabilities) super.set(fieldName, value);
    }
    // Covariant override so callers get a correctly typed copy from GenericJson.clone().
    @Override
    public Capabilities clone() {
      return (Capabilities) super.clone();
    }
  }
  /**
   * A set of restrictions that apply to this shared drive or items inside this shared drive.
   */
  public static final class Restrictions extends com.google.api.client.json.GenericJson {
    /**
     * Whether administrative privileges on this shared drive are required to modify restrictions.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean adminManagedRestrictions;
    /**
     * Whether the options to copy, print, or download files inside this shared drive, should be
     * disabled for readers and commenters. When this restriction is set to true, it will override the
     * similarly named field to true for any file inside this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean copyRequiresWriterPermission;
    /**
     * Whether access to this shared drive and items inside this shared drive is restricted to users
     * of the domain to which this shared drive belongs. This restriction may be overridden by other
     * sharing policies controlled outside of this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean domainUsersOnly;
    /**
     * Whether access to items inside this shared drive is restricted to its members.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean driveMembersOnly;
    /**
     * Whether administrative privileges on this shared drive are required to modify restrictions.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getAdminManagedRestrictions() {
      return adminManagedRestrictions;
    }
    /**
     * Whether administrative privileges on this shared drive are required to modify restrictions.
     * @param adminManagedRestrictions adminManagedRestrictions or {@code null} for none
     */
    public Restrictions setAdminManagedRestrictions(java.lang.Boolean adminManagedRestrictions) {
      this.adminManagedRestrictions = adminManagedRestrictions;
      return this;
    }
    /**
     * Whether the options to copy, print, or download files inside this shared drive, should be
     * disabled for readers and commenters. When this restriction is set to true, it will override the
     * similarly named field to true for any file inside this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCopyRequiresWriterPermission() {
      return copyRequiresWriterPermission;
    }
    /**
     * Whether the options to copy, print, or download files inside this shared drive, should be
     * disabled for readers and commenters. When this restriction is set to true, it will override the
     * similarly named field to true for any file inside this shared drive.
     * @param copyRequiresWriterPermission copyRequiresWriterPermission or {@code null} for none
     */
    public Restrictions setCopyRequiresWriterPermission(java.lang.Boolean copyRequiresWriterPermission) {
      this.copyRequiresWriterPermission = copyRequiresWriterPermission;
      return this;
    }
    /**
     * Whether access to this shared drive and items inside this shared drive is restricted to users
     * of the domain to which this shared drive belongs. This restriction may be overridden by other
     * sharing policies controlled outside of this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getDomainUsersOnly() {
      return domainUsersOnly;
    }
    /**
     * Whether access to this shared drive and items inside this shared drive is restricted to users
     * of the domain to which this shared drive belongs. This restriction may be overridden by other
     * sharing policies controlled outside of this shared drive.
     * @param domainUsersOnly domainUsersOnly or {@code null} for none
     */
    public Restrictions setDomainUsersOnly(java.lang.Boolean domainUsersOnly) {
      this.domainUsersOnly = domainUsersOnly;
      return this;
    }
    /**
     * Whether access to items inside this shared drive is restricted to its members.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getDriveMembersOnly() {
      return driveMembersOnly;
    }
    /**
     * Whether access to items inside this shared drive is restricted to its members.
     * @param driveMembersOnly driveMembersOnly or {@code null} for none
     */
    public Restrictions setDriveMembersOnly(java.lang.Boolean driveMembersOnly) {
      this.driveMembersOnly = driveMembersOnly;
      return this;
    }
    // Covariant override: keeps the fluent return type when setting arbitrary JSON fields.
    @Override
    public Restrictions set(String fieldName, Object value) {
      return (Restrictions) super.set(fieldName, value);
    }
    // Covariant override so callers get a correctly typed copy from GenericJson.clone().
    @Override
    public Restrictions clone() {
      return (Restrictions) super.clone();
    }
  }
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.drive.model;
/**
* Representation of a shared drive.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Drive API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Drive extends com.google.api.client.json.GenericJson {
  /**
   * An image file and cropping parameters from which a background image for this shared drive is
   * set. This is a write only field; it can only be set on drive.drives.update requests that don't
   * set themeId. When specified, all fields of the backgroundImageFile must be set.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private BackgroundImageFile backgroundImageFile;
  /**
   * A short-lived link to this shared drive's background image.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String backgroundImageLink;
  /**
   * Capabilities the current user has on this shared drive.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Capabilities capabilities;
  /**
   * The color of this shared drive as an RGB hex string. It can only be set on a
   * drive.drives.update request that does not set themeId.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String colorRgb;
  /**
   * The time at which the shared drive was created (RFC 3339 date-time).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private com.google.api.client.util.DateTime createdDate;
  /**
   * Whether the shared drive is hidden from default view.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean hidden;
  /**
   * The ID of this shared drive which is also the ID of the top level folder of this shared drive.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String id;
  /**
   * This is always drive#drive
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;
  /**
   * The name of this shared drive.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;
  /**
   * A set of restrictions that apply to this shared drive or items inside this shared drive.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Restrictions restrictions;
  /**
   * The ID of the theme from which the background image and color will be set. The set of possible
   * driveThemes can be retrieved from a drive.about.get response. When not specified on a
   * drive.drives.insert request, a random theme is chosen from which the background image and color
   * are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
   * backgroundImageFile.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String themeId;
  /**
   * An image file and cropping parameters from which a background image for this shared drive is
   * set. This is a write only field; it can only be set on drive.drives.update requests that don't
   * set themeId. When specified, all fields of the backgroundImageFile must be set.
   * @return value or {@code null} for none
   */
  public BackgroundImageFile getBackgroundImageFile() {
    return backgroundImageFile;
  }
  /**
   * An image file and cropping parameters from which a background image for this shared drive is
   * set. This is a write only field; it can only be set on drive.drives.update requests that don't
   * set themeId. When specified, all fields of the backgroundImageFile must be set.
   * @param backgroundImageFile backgroundImageFile or {@code null} for none
   */
  public Drive setBackgroundImageFile(BackgroundImageFile backgroundImageFile) {
    this.backgroundImageFile = backgroundImageFile;
    return this;
  }
  /**
   * A short-lived link to this shared drive's background image.
   * @return value or {@code null} for none
   */
  public java.lang.String getBackgroundImageLink() {
    return backgroundImageLink;
  }
  /**
   * A short-lived link to this shared drive's background image.
   * @param backgroundImageLink backgroundImageLink or {@code null} for none
   */
  public Drive setBackgroundImageLink(java.lang.String backgroundImageLink) {
    this.backgroundImageLink = backgroundImageLink;
    return this;
  }
  /**
   * Capabilities the current user has on this shared drive.
   * @return value or {@code null} for none
   */
  public Capabilities getCapabilities() {
    return capabilities;
  }
  /**
   * Capabilities the current user has on this shared drive.
   * @param capabilities capabilities or {@code null} for none
   */
  public Drive setCapabilities(Capabilities capabilities) {
    this.capabilities = capabilities;
    return this;
  }
  /**
   * The color of this shared drive as an RGB hex string. It can only be set on a
   * drive.drives.update request that does not set themeId.
   * @return value or {@code null} for none
   */
  public java.lang.String getColorRgb() {
    return colorRgb;
  }
  /**
   * The color of this shared drive as an RGB hex string. It can only be set on a
   * drive.drives.update request that does not set themeId.
   * @param colorRgb colorRgb or {@code null} for none
   */
  public Drive setColorRgb(java.lang.String colorRgb) {
    this.colorRgb = colorRgb;
    return this;
  }
  /**
   * The time at which the shared drive was created (RFC 3339 date-time).
   * @return value or {@code null} for none
   */
  public com.google.api.client.util.DateTime getCreatedDate() {
    return createdDate;
  }
  /**
   * The time at which the shared drive was created (RFC 3339 date-time).
   * @param createdDate createdDate or {@code null} for none
   */
  public Drive setCreatedDate(com.google.api.client.util.DateTime createdDate) {
    this.createdDate = createdDate;
    return this;
  }
  /**
   * Whether the shared drive is hidden from default view.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getHidden() {
    return hidden;
  }
  /**
   * Whether the shared drive is hidden from default view.
   * @param hidden hidden or {@code null} for none
   */
  public Drive setHidden(java.lang.Boolean hidden) {
    this.hidden = hidden;
    return this;
  }
  /**
   * The ID of this shared drive which is also the ID of the top level folder of this shared drive.
   * @return value or {@code null} for none
   */
  public java.lang.String getId() {
    return id;
  }
  /**
   * The ID of this shared drive which is also the ID of the top level folder of this shared drive.
   * @param id id or {@code null} for none
   */
  public Drive setId(java.lang.String id) {
    this.id = id;
    return this;
  }
  /**
   * This is always drive#drive
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }
  /**
   * This is always drive#drive
   * @param kind kind or {@code null} for none
   */
  public Drive setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }
  /**
   * The name of this shared drive.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }
  /**
   * The name of this shared drive.
   * @param name name or {@code null} for none
   */
  public Drive setName(java.lang.String name) {
    this.name = name;
    return this;
  }
  /**
   * A set of restrictions that apply to this shared drive or items inside this shared drive.
   * @return value or {@code null} for none
   */
  public Restrictions getRestrictions() {
    return restrictions;
  }
  /**
   * A set of restrictions that apply to this shared drive or items inside this shared drive.
   * @param restrictions restrictions or {@code null} for none
   */
  public Drive setRestrictions(Restrictions restrictions) {
    this.restrictions = restrictions;
    return this;
  }
  /**
   * The ID of the theme from which the background image and color will be set. The set of possible
   * driveThemes can be retrieved from a drive.about.get response. When not specified on a
   * drive.drives.insert request, a random theme is chosen from which the background image and color
   * are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
   * backgroundImageFile.
   * @return value or {@code null} for none
   */
  public java.lang.String getThemeId() {
    return themeId;
  }
  /**
   * The ID of the theme from which the background image and color will be set. The set of possible
   * driveThemes can be retrieved from a drive.about.get response. When not specified on a
   * drive.drives.insert request, a random theme is chosen from which the background image and color
   * are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
   * backgroundImageFile.
   * @param themeId themeId or {@code null} for none
   */
  public Drive setThemeId(java.lang.String themeId) {
    this.themeId = themeId;
    return this;
  }
  // Covariant override: keeps the fluent return type when setting arbitrary JSON fields.
  @Override
  public Drive set(String fieldName, Object value) {
    return (Drive) super.set(fieldName, value);
  }
  // Covariant override so callers get a correctly typed copy from GenericJson.clone().
  @Override
  public Drive clone() {
    return (Drive) super.clone();
  }
  /**
   * An image file and cropping parameters from which a background image for this shared drive is set.
   * This is a write only field; it can only be set on drive.drives.update requests that don't set
   * themeId. When specified, all fields of the backgroundImageFile must be set.
   */
  public static final class BackgroundImageFile extends com.google.api.client.json.GenericJson {
    /**
     * The ID of an image file in Google Drive to use for the background image.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String id;
    /**
     * The width of the cropped image in the closed range of 0 to 1. This value represents the width
     * of the cropped image divided by the width of the entire image. The height is computed by
     * applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
     * pixels wide and 144 pixels high.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Float width;
    /**
     * The X coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the horizontal distance from the
     * left side of the entire image to the left side of the cropping area divided by the width of the
     * entire image.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Float xCoordinate;
    /**
     * The Y coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the vertical distance from the top
     * side of the entire image to the top side of the cropping area divided by the height of the
     * entire image.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Float yCoordinate;
    /**
     * The ID of an image file in Google Drive to use for the background image.
     * @return value or {@code null} for none
     */
    public java.lang.String getId() {
      return id;
    }
    /**
     * The ID of an image file in Google Drive to use for the background image.
     * @param id id or {@code null} for none
     */
    public BackgroundImageFile setId(java.lang.String id) {
      this.id = id;
      return this;
    }
    /**
     * The width of the cropped image in the closed range of 0 to 1. This value represents the width
     * of the cropped image divided by the width of the entire image. The height is computed by
     * applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
     * pixels wide and 144 pixels high.
     * @return value or {@code null} for none
     */
    public java.lang.Float getWidth() {
      return width;
    }
    /**
     * The width of the cropped image in the closed range of 0 to 1. This value represents the width
     * of the cropped image divided by the width of the entire image. The height is computed by
     * applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
     * pixels wide and 144 pixels high.
     * @param width width or {@code null} for none
     */
    public BackgroundImageFile setWidth(java.lang.Float width) {
      this.width = width;
      return this;
    }
    /**
     * The X coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the horizontal distance from the
     * left side of the entire image to the left side of the cropping area divided by the width of the
     * entire image.
     * @return value or {@code null} for none
     */
    public java.lang.Float getXCoordinate() {
      return xCoordinate;
    }
    /**
     * The X coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the horizontal distance from the
     * left side of the entire image to the left side of the cropping area divided by the width of the
     * entire image.
     * @param xCoordinate xCoordinate or {@code null} for none
     */
    public BackgroundImageFile setXCoordinate(java.lang.Float xCoordinate) {
      this.xCoordinate = xCoordinate;
      return this;
    }
    /**
     * The Y coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the vertical distance from the top
     * side of the entire image to the top side of the cropping area divided by the height of the
     * entire image.
     * @return value or {@code null} for none
     */
    public java.lang.Float getYCoordinate() {
      return yCoordinate;
    }
    /**
     * The Y coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the vertical distance from the top
     * side of the entire image to the top side of the cropping area divided by the height of the
     * entire image.
     * @param yCoordinate yCoordinate or {@code null} for none
     */
    public BackgroundImageFile setYCoordinate(java.lang.Float yCoordinate) {
      this.yCoordinate = yCoordinate;
      return this;
    }
    // Covariant override: keeps the fluent return type when setting arbitrary JSON fields.
    @Override
    public BackgroundImageFile set(String fieldName, Object value) {
      return (BackgroundImageFile) super.set(fieldName, value);
    }
    // Covariant override so callers get a correctly typed copy from GenericJson.clone().
    @Override
    public BackgroundImageFile clone() {
      return (BackgroundImageFile) super.clone();
    }
  }
  /**
   * Capabilities the current user has on this shared drive.
   *
   * <p>Every field is a {@code Boolean} flag bound to a JSON property via
   * {@code @com.google.api.client.util.Key}; any flag may be {@code null} when the server omits it.
   * Setters return {@code this} to allow call chaining.</p>
   */
  public static final class Capabilities extends com.google.api.client.json.GenericJson {
    /**
     * Whether the current user can add children to folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canAddChildren;
    /**
     * Whether the current user can change the copyRequiresWriterPermission restriction of this shared
     * drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction;
    /**
     * Whether the current user can change the domainUsersOnly restriction of this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeDomainUsersOnlyRestriction;
    /**
     * Whether the current user can change the background of this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeDriveBackground;
    /**
     * Whether the current user can change the driveMembersOnly restriction of this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeDriveMembersOnlyRestriction;
    /**
     * Whether the current user can comment on files in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canComment;
    /**
     * Whether the current user can copy files in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canCopy;
    /**
     * Whether the current user can delete children from folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canDeleteChildren;
    /**
     * Whether the current user can delete this shared drive. Attempting to delete the shared drive
     * may still fail if there are untrashed items inside the shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canDeleteDrive;
    /**
     * Whether the current user can download files in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canDownload;
    /**
     * Whether the current user can edit files in this shared drive
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canEdit;
    /**
     * Whether the current user can list the children of folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canListChildren;
    /**
     * Whether the current user can add members to this shared drive or remove them or change their
     * role.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canManageMembers;
    /**
     * Whether the current user can read the revisions resource of files in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canReadRevisions;
    /**
     * Whether the current user can rename files or folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canRename;
    /**
     * Whether the current user can rename this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canRenameDrive;
    /**
     * Whether the current user can share files or folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canShare;
    /**
     * Whether the current user can trash children from folders in this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canTrashChildren;
    /**
     * Whether the current user can add children to folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanAddChildren() {
      return canAddChildren;
    }
    /**
     * Whether the current user can add children to folders in this shared drive.
     * @param canAddChildren canAddChildren or {@code null} for none
     */
    public Capabilities setCanAddChildren(java.lang.Boolean canAddChildren) {
      this.canAddChildren = canAddChildren;
      return this;
    }
    /**
     * Whether the current user can change the copyRequiresWriterPermission restriction of this shared
     * drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeCopyRequiresWriterPermissionRestriction() {
      return canChangeCopyRequiresWriterPermissionRestriction;
    }
    /**
     * Whether the current user can change the copyRequiresWriterPermission restriction of this shared
     * drive.
     * @param canChangeCopyRequiresWriterPermissionRestriction canChangeCopyRequiresWriterPermissionRestriction or {@code null} for none
     */
    public Capabilities setCanChangeCopyRequiresWriterPermissionRestriction(java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction) {
      this.canChangeCopyRequiresWriterPermissionRestriction = canChangeCopyRequiresWriterPermissionRestriction;
      return this;
    }
    /**
     * Whether the current user can change the domainUsersOnly restriction of this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeDomainUsersOnlyRestriction() {
      return canChangeDomainUsersOnlyRestriction;
    }
    /**
     * Whether the current user can change the domainUsersOnly restriction of this shared drive.
     * @param canChangeDomainUsersOnlyRestriction canChangeDomainUsersOnlyRestriction or {@code null} for none
     */
    public Capabilities setCanChangeDomainUsersOnlyRestriction(java.lang.Boolean canChangeDomainUsersOnlyRestriction) {
      this.canChangeDomainUsersOnlyRestriction = canChangeDomainUsersOnlyRestriction;
      return this;
    }
    /**
     * Whether the current user can change the background of this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeDriveBackground() {
      return canChangeDriveBackground;
    }
    /**
     * Whether the current user can change the background of this shared drive.
     * @param canChangeDriveBackground canChangeDriveBackground or {@code null} for none
     */
    public Capabilities setCanChangeDriveBackground(java.lang.Boolean canChangeDriveBackground) {
      this.canChangeDriveBackground = canChangeDriveBackground;
      return this;
    }
    /**
     * Whether the current user can change the driveMembersOnly restriction of this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeDriveMembersOnlyRestriction() {
      return canChangeDriveMembersOnlyRestriction;
    }
    /**
     * Whether the current user can change the driveMembersOnly restriction of this shared drive.
     * @param canChangeDriveMembersOnlyRestriction canChangeDriveMembersOnlyRestriction or {@code null} for none
     */
    public Capabilities setCanChangeDriveMembersOnlyRestriction(java.lang.Boolean canChangeDriveMembersOnlyRestriction) {
      this.canChangeDriveMembersOnlyRestriction = canChangeDriveMembersOnlyRestriction;
      return this;
    }
    /**
     * Whether the current user can comment on files in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanComment() {
      return canComment;
    }
    /**
     * Whether the current user can comment on files in this shared drive.
     * @param canComment canComment or {@code null} for none
     */
    public Capabilities setCanComment(java.lang.Boolean canComment) {
      this.canComment = canComment;
      return this;
    }
    /**
     * Whether the current user can copy files in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanCopy() {
      return canCopy;
    }
    /**
     * Whether the current user can copy files in this shared drive.
     * @param canCopy canCopy or {@code null} for none
     */
    public Capabilities setCanCopy(java.lang.Boolean canCopy) {
      this.canCopy = canCopy;
      return this;
    }
    /**
     * Whether the current user can delete children from folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanDeleteChildren() {
      return canDeleteChildren;
    }
    /**
     * Whether the current user can delete children from folders in this shared drive.
     * @param canDeleteChildren canDeleteChildren or {@code null} for none
     */
    public Capabilities setCanDeleteChildren(java.lang.Boolean canDeleteChildren) {
      this.canDeleteChildren = canDeleteChildren;
      return this;
    }
    /**
     * Whether the current user can delete this shared drive. Attempting to delete the shared drive
     * may still fail if there are untrashed items inside the shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanDeleteDrive() {
      return canDeleteDrive;
    }
    /**
     * Whether the current user can delete this shared drive. Attempting to delete the shared drive
     * may still fail if there are untrashed items inside the shared drive.
     * @param canDeleteDrive canDeleteDrive or {@code null} for none
     */
    public Capabilities setCanDeleteDrive(java.lang.Boolean canDeleteDrive) {
      this.canDeleteDrive = canDeleteDrive;
      return this;
    }
    /**
     * Whether the current user can download files in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanDownload() {
      return canDownload;
    }
    /**
     * Whether the current user can download files in this shared drive.
     * @param canDownload canDownload or {@code null} for none
     */
    public Capabilities setCanDownload(java.lang.Boolean canDownload) {
      this.canDownload = canDownload;
      return this;
    }
    /**
     * Whether the current user can edit files in this shared drive
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanEdit() {
      return canEdit;
    }
    /**
     * Whether the current user can edit files in this shared drive
     * @param canEdit canEdit or {@code null} for none
     */
    public Capabilities setCanEdit(java.lang.Boolean canEdit) {
      this.canEdit = canEdit;
      return this;
    }
    /**
     * Whether the current user can list the children of folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanListChildren() {
      return canListChildren;
    }
    /**
     * Whether the current user can list the children of folders in this shared drive.
     * @param canListChildren canListChildren or {@code null} for none
     */
    public Capabilities setCanListChildren(java.lang.Boolean canListChildren) {
      this.canListChildren = canListChildren;
      return this;
    }
    /**
     * Whether the current user can add members to this shared drive or remove them or change their
     * role.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanManageMembers() {
      return canManageMembers;
    }
    /**
     * Whether the current user can add members to this shared drive or remove them or change their
     * role.
     * @param canManageMembers canManageMembers or {@code null} for none
     */
    public Capabilities setCanManageMembers(java.lang.Boolean canManageMembers) {
      this.canManageMembers = canManageMembers;
      return this;
    }
    /**
     * Whether the current user can read the revisions resource of files in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanReadRevisions() {
      return canReadRevisions;
    }
    /**
     * Whether the current user can read the revisions resource of files in this shared drive.
     * @param canReadRevisions canReadRevisions or {@code null} for none
     */
    public Capabilities setCanReadRevisions(java.lang.Boolean canReadRevisions) {
      this.canReadRevisions = canReadRevisions;
      return this;
    }
    /**
     * Whether the current user can rename files or folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanRename() {
      return canRename;
    }
    /**
     * Whether the current user can rename files or folders in this shared drive.
     * @param canRename canRename or {@code null} for none
     */
    public Capabilities setCanRename(java.lang.Boolean canRename) {
      this.canRename = canRename;
      return this;
    }
    /**
     * Whether the current user can rename this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanRenameDrive() {
      return canRenameDrive;
    }
    /**
     * Whether the current user can rename this shared drive.
     * @param canRenameDrive canRenameDrive or {@code null} for none
     */
    public Capabilities setCanRenameDrive(java.lang.Boolean canRenameDrive) {
      this.canRenameDrive = canRenameDrive;
      return this;
    }
    /**
     * Whether the current user can share files or folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanShare() {
      return canShare;
    }
    /**
     * Whether the current user can share files or folders in this shared drive.
     * @param canShare canShare or {@code null} for none
     */
    public Capabilities setCanShare(java.lang.Boolean canShare) {
      this.canShare = canShare;
      return this;
    }
    /**
     * Whether the current user can trash children from folders in this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanTrashChildren() {
      return canTrashChildren;
    }
    /**
     * Whether the current user can trash children from folders in this shared drive.
     * @param canTrashChildren canTrashChildren or {@code null} for none
     */
    public Capabilities setCanTrashChildren(java.lang.Boolean canTrashChildren) {
      this.canTrashChildren = canTrashChildren;
      return this;
    }
    // Covariant overrides of GenericJson#set and #clone so chained calls keep the
    // Capabilities type; behavior is delegated to the superclass.
    @Override
    public Capabilities set(String fieldName, Object value) {
      return (Capabilities) super.set(fieldName, value);
    }
    @Override
    public Capabilities clone() {
      return (Capabilities) super.clone();
    }
  }
  /**
   * A set of restrictions that apply to this shared drive or items inside this shared drive.
   *
   * <p>Every field is a {@code Boolean} flag bound to a JSON property via
   * {@code @com.google.api.client.util.Key}; any flag may be {@code null} when the server omits it.
   * Setters return {@code this} to allow call chaining.</p>
   */
  public static final class Restrictions extends com.google.api.client.json.GenericJson {
    /**
     * Whether administrative privileges on this shared drive are required to modify restrictions.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean adminManagedRestrictions;
    /**
     * Whether the options to copy, print, or download files inside this shared drive, should be
     * disabled for readers and commenters. When this restriction is set to true, it will override the
     * similarly named field to true for any file inside this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean copyRequiresWriterPermission;
    /**
     * Whether access to this shared drive and items inside this shared drive is restricted to users
     * of the domain to which this shared drive belongs. This restriction may be overridden by other
     * sharing policies controlled outside of this shared drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean domainUsersOnly;
    /**
     * Whether access to items inside this shared drive is restricted to its members.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean driveMembersOnly;
    /**
     * Whether administrative privileges on this shared drive are required to modify restrictions.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getAdminManagedRestrictions() {
      return adminManagedRestrictions;
    }
    /**
     * Whether administrative privileges on this shared drive are required to modify restrictions.
     * @param adminManagedRestrictions adminManagedRestrictions or {@code null} for none
     */
    public Restrictions setAdminManagedRestrictions(java.lang.Boolean adminManagedRestrictions) {
      this.adminManagedRestrictions = adminManagedRestrictions;
      return this;
    }
    /**
     * Whether the options to copy, print, or download files inside this shared drive, should be
     * disabled for readers and commenters. When this restriction is set to true, it will override the
     * similarly named field to true for any file inside this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCopyRequiresWriterPermission() {
      return copyRequiresWriterPermission;
    }
    /**
     * Whether the options to copy, print, or download files inside this shared drive, should be
     * disabled for readers and commenters. When this restriction is set to true, it will override the
     * similarly named field to true for any file inside this shared drive.
     * @param copyRequiresWriterPermission copyRequiresWriterPermission or {@code null} for none
     */
    public Restrictions setCopyRequiresWriterPermission(java.lang.Boolean copyRequiresWriterPermission) {
      this.copyRequiresWriterPermission = copyRequiresWriterPermission;
      return this;
    }
    /**
     * Whether access to this shared drive and items inside this shared drive is restricted to users
     * of the domain to which this shared drive belongs. This restriction may be overridden by other
     * sharing policies controlled outside of this shared drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getDomainUsersOnly() {
      return domainUsersOnly;
    }
    /**
     * Whether access to this shared drive and items inside this shared drive is restricted to users
     * of the domain to which this shared drive belongs. This restriction may be overridden by other
     * sharing policies controlled outside of this shared drive.
     * @param domainUsersOnly domainUsersOnly or {@code null} for none
     */
    public Restrictions setDomainUsersOnly(java.lang.Boolean domainUsersOnly) {
      this.domainUsersOnly = domainUsersOnly;
      return this;
    }
    /**
     * Whether access to items inside this shared drive is restricted to its members.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getDriveMembersOnly() {
      return driveMembersOnly;
    }
    /**
     * Whether access to items inside this shared drive is restricted to its members.
     * @param driveMembersOnly driveMembersOnly or {@code null} for none
     */
    public Restrictions setDriveMembersOnly(java.lang.Boolean driveMembersOnly) {
      this.driveMembersOnly = driveMembersOnly;
      return this;
    }
    // Covariant overrides of GenericJson#set and #clone so chained calls keep the
    // Restrictions type; behavior is delegated to the superclass.
    @Override
    public Restrictions set(String fieldName, Object value) {
      return (Restrictions) super.set(fieldName, value);
    }
    @Override
    public Restrictions clone() {
      return (Restrictions) super.clone();
    }
  }
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.drive.model;
/**
* Representation of a shared drive.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Drive API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Drive extends com.google.api.client.json.GenericJson {
  // Each field below is bound to its JSON property via @com.google.api.client.util.Key and is
  // populated by the JSON parser; matching getters/setters follow later in the class.
  /**
   * An image file and cropping parameters from which a background image for this shared drive is
   * set. This is a write only field; it can only be set on drive.drives.update requests that don't
   * set themeId. When specified, all fields of the backgroundImageFile must be set.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private BackgroundImageFile backgroundImageFile;
  /**
   * A short-lived link to this shared drive's background image.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String backgroundImageLink;
  /**
   * Capabilities the current user has on this shared drive.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Capabilities capabilities;
  /**
   * The color of this shared drive as an RGB hex string. It can only be set on a
   * drive.drives.update request that does not set themeId.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String colorRgb;
  /**
   * The time at which the shared drive was created (RFC 3339 date-time).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private com.google.api.client.util.DateTime createdDate;
  /**
   * Whether the shared drive is hidden from default view.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean hidden;
  /**
   * The ID of this shared drive which is also the ID of the top level folder of this shared drive.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String id;
  /**
   * This is always drive#drive
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;
  /**
   * The name of this shared drive.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;
  /**
   * A set of restrictions that apply to this shared drive or items inside this shared drive.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Restrictions restrictions;
  /**
   * The ID of the theme from which the background image and color will be set. The set of possible
   * driveThemes can be retrieved from a drive.about.get response. When not specified on a
   * drive.drives.insert request, a random theme is chosen from which the background image and color
   * are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
   * backgroundImageFile.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String themeId;
  /**
   * An image file and cropping parameters from which a background image for this shared drive is
   * set. This is a write only field; it can only be set on drive.drives.update requests that don't
   * set themeId. When specified, all fields of the backgroundImageFile must be set.
   * @return value or {@code null} for none
   */
  public BackgroundImageFile getBackgroundImageFile() {
    return backgroundImageFile;
  }
  /**
   * An image file and cropping parameters from which a background image for this shared drive is
   * set. This is a write only field; it can only be set on drive.drives.update requests that don't
   * set themeId. When specified, all fields of the backgroundImageFile must be set.
   * @param backgroundImageFile backgroundImageFile or {@code null} for none
   */
  public Drive setBackgroundImageFile(BackgroundImageFile backgroundImageFile) {
    this.backgroundImageFile = backgroundImageFile;
    return this;
  }
  /**
   * A short-lived link to this shared drive's background image.
   * @return value or {@code null} for none
   */
  public java.lang.String getBackgroundImageLink() {
    return backgroundImageLink;
  }
  /**
   * A short-lived link to this shared drive's background image.
   * @param backgroundImageLink backgroundImageLink or {@code null} for none
   */
  public Drive setBackgroundImageLink(java.lang.String backgroundImageLink) {
    this.backgroundImageLink = backgroundImageLink;
    return this;
  }
  /**
   * Capabilities the current user has on this shared drive.
   * @return value or {@code null} for none
   */
  public Capabilities getCapabilities() {
    return capabilities;
  }
  /**
   * Capabilities the current user has on this shared drive.
   * @param capabilities capabilities or {@code null} for none
   */
  public Drive setCapabilities(Capabilities capabilities) {
    this.capabilities = capabilities;
    return this;
  }
  /**
   * The color of this shared drive as an RGB hex string. It can only be set on a
   * drive.drives.update request that does not set themeId.
   * @return value or {@code null} for none
   */
  public java.lang.String getColorRgb() {
    return colorRgb;
  }
  /**
   * The color of this shared drive as an RGB hex string. It can only be set on a
   * drive.drives.update request that does not set themeId.
   * @param colorRgb colorRgb or {@code null} for none
   */
  public Drive setColorRgb(java.lang.String colorRgb) {
    this.colorRgb = colorRgb;
    return this;
  }
  /**
   * The time at which the shared drive was created (RFC 3339 date-time).
   * @return value or {@code null} for none
   */
  public com.google.api.client.util.DateTime getCreatedDate() {
    return createdDate;
  }
  /**
   * The time at which the shared drive was created (RFC 3339 date-time).
   * @param createdDate createdDate or {@code null} for none
   */
  public Drive setCreatedDate(com.google.api.client.util.DateTime createdDate) {
    this.createdDate = createdDate;
    return this;
  }
  /**
   * Whether the shared drive is hidden from default view.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getHidden() {
    return hidden;
  }
  /**
   * Whether the shared drive is hidden from default view.
   * @param hidden hidden or {@code null} for none
   */
  public Drive setHidden(java.lang.Boolean hidden) {
    this.hidden = hidden;
    return this;
  }
  /**
   * The ID of this shared drive which is also the ID of the top level folder of this shared drive.
   * @return value or {@code null} for none
   */
  public java.lang.String getId() {
    return id;
  }
  /**
   * The ID of this shared drive which is also the ID of the top level folder of this shared drive.
   * @param id id or {@code null} for none
   */
  public Drive setId(java.lang.String id) {
    this.id = id;
    return this;
  }
  /**
   * This is always drive#drive
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }
  /**
   * This is always drive#drive
   * @param kind kind or {@code null} for none
   */
  public Drive setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }
  /**
   * The name of this shared drive.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }
  /**
   * The name of this shared drive.
   * @param name name or {@code null} for none
   */
  public Drive setName(java.lang.String name) {
    this.name = name;
    return this;
  }
  /**
   * A set of restrictions that apply to this shared drive or items inside this shared drive.
   * @return value or {@code null} for none
   */
  public Restrictions getRestrictions() {
    return restrictions;
  }
  /**
   * A set of restrictions that apply to this shared drive or items inside this shared drive.
   * @param restrictions restrictions or {@code null} for none
   */
  public Drive setRestrictions(Restrictions restrictions) {
    this.restrictions = restrictions;
    return this;
  }
  /**
   * The ID of the theme from which the background image and color will be set. The set of possible
   * driveThemes can be retrieved from a drive.about.get response. When not specified on a
   * drive.drives.insert request, a random theme is chosen from which the background image and color
   * are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
   * backgroundImageFile.
   * @return value or {@code null} for none
   */
  public java.lang.String getThemeId() {
    return themeId;
  }
  /**
   * The ID of the theme from which the background image and color will be set. The set of possible
   * driveThemes can be retrieved from a drive.about.get response. When not specified on a
   * drive.drives.insert request, a random theme is chosen from which the background image and color
   * are set. This is a write-only field; it can only be set on requests that don't set colorRgb or
   * backgroundImageFile.
   * @param themeId themeId or {@code null} for none
   */
  public Drive setThemeId(java.lang.String themeId) {
    this.themeId = themeId;
    return this;
  }
  // Covariant overrides of GenericJson#set and #clone so chained calls keep the
  // Drive type; behavior is delegated to the superclass.
  @Override
  public Drive set(String fieldName, Object value) {
    return (Drive) super.set(fieldName, value);
  }
  @Override
  public Drive clone() {
    return (Drive) super.clone();
  }
/**
* An image file and cropping parameters from which a background image for this shared drive is set.
* This is a write only field; it can only be set on drive.drives.update requests that don't set
* themeId. When specified, all fields of the backgroundImageFile must be set.
*/
public static final class BackgroundImageFile extends com.google.api.client.json.GenericJson {
/**
* The ID of an image file in Google Drive to use for the background image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String id;
/**
* The width of the cropped image in the closed range of 0 to 1. This value represents the width
* of the cropped image divided by the width of the entire image. The height is computed by
* applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
* pixels wide and 144 pixels high.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Float width;
/**
* The X coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the horizontal distance from the
* left side of the entire image to the left side of the cropping area divided by the width of the
* entire image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Float xCoordinate;
/**
* The Y coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the vertical distance from the top
* side of the entire image to the top side of the cropping area divided by the height of the
* entire image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Float yCoordinate;
/**
* The ID of an image file in Google Drive to use for the background image.
* @return value or {@code null} for none
*/
public java.lang.String getId() {
return id;
}
/**
* The ID of an image file in Google Drive to use for the background image.
* @param id id or {@code null} for none
*/
public BackgroundImageFile setId(java.lang.String id) {
this.id = id;
return this;
}
/**
* The width of the cropped image in the closed range of 0 to 1. This value represents the width
* of the cropped image divided by the width of the entire image. The height is computed by
* applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
* pixels wide and 144 pixels high.
* @return value or {@code null} for none
*/
public java.lang.Float getWidth() {
return width;
}
/**
* The width of the cropped image in the closed range of 0 to 1. This value represents the width
* of the cropped image divided by the width of the entire image. The height is computed by
* applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
* pixels wide and 144 pixels high.
* @param width width or {@code null} for none
*/
public BackgroundImageFile setWidth(java.lang.Float width) {
this.width = width;
return this;
}
/**
* The X coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the horizontal distance from the
* left side of the entire image to the left side of the cropping area divided by the width of the
* entire image.
* @return value or {@code null} for none
*/
public java.lang.Float getXCoordinate() {
  // Horizontal offset (0..1) of the crop area's upper-left corner, or null when unset.
  return this.xCoordinate;
}
/**
* The X coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the horizontal distance from the
* left side of the entire image to the left side of the cropping area divided by the width of the
* entire image.
* @param xCoordinate xCoordinate or {@code null} for none
*/
public BackgroundImageFile setXCoordinate(java.lang.Float xCoordinate) {
  // Fluent setter: horizontal crop offset in [0, 1]; returns this for chaining.
  this.xCoordinate = xCoordinate;
  return this;
}
/**
* The Y coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the vertical distance from the top
* side of the entire image to the top side of the cropping area divided by the height of the
* entire image.
* @return value or {@code null} for none
*/
public java.lang.Float getYCoordinate() {
  // Vertical offset (0..1) of the crop area's upper-left corner, or null when unset.
  return this.yCoordinate;
}
/**
* The Y coordinate of the upper left corner of the cropping area in the background image. This is
* a value in the closed range of 0 to 1. This value represents the vertical distance from the top
* side of the entire image to the top side of the cropping area divided by the height of the
* entire image.
* @param yCoordinate yCoordinate or {@code null} for none
*/
public BackgroundImageFile setYCoordinate(java.lang.Float yCoordinate) {
  // Fluent setter: vertical crop offset in [0, 1]; returns this for chaining.
  this.yCoordinate = yCoordinate;
  return this;
}
@Override
public BackgroundImageFile set(String fieldName, Object value) {
  // Delegate the dynamic key/value set to GenericJson, narrowing the
  // returned instance so fluent chains keep the concrete type.
  com.google.api.client.json.GenericJson result = super.set(fieldName, value);
  return (BackgroundImageFile) result;
}
@Override
public BackgroundImageFile clone() {
  // Covariant override: super.clone() returns the same runtime type, so the
  // cast is safe and callers get the concrete type without casting.
  return (BackgroundImageFile) super.clone();
}
}
/**
* Capabilities the current user has on this shared drive.
*/
public static final class Capabilities extends com.google.api.client.json.GenericJson {
  // NOTE(review): generated wire-format model — each field name doubles as the
  // JSON key via @Key, so fields must not be renamed. All values are tri-state:
  // {@code null} means "not populated by the server".

  /** Whether the current user can add children to folders in this shared drive. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canAddChildren;
  /** Whether the current user can change the copyRequiresWriterPermission restriction. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction;
  /** Whether the current user can change the domainUsersOnly restriction. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canChangeDomainUsersOnlyRestriction;
  /** Whether the current user can change the background of this shared drive. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canChangeDriveBackground;
  /** Whether the current user can change the driveMembersOnly restriction. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canChangeDriveMembersOnlyRestriction;
  /** Whether the current user can comment on files in this shared drive. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canComment;
  /** Whether the current user can copy files in this shared drive. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canCopy;
  /** Whether the current user can delete children from folders in this shared drive. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canDeleteChildren;
  /**
   * Whether the current user can delete this shared drive; deletion may still fail if untrashed
   * items remain inside it. May be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean canDeleteDrive;
  /** Whether the current user can download files in this shared drive. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canDownload;
  /** Whether the current user can edit files in this shared drive. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canEdit;
  /** Whether the current user can list the children of folders in this shared drive. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canListChildren;
  /** Whether the current user can add/remove members of this shared drive or change their role. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canManageMembers;
  /** Whether the current user can read the revisions resource of files in this shared drive. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canReadRevisions;
  /** Whether the current user can rename files or folders in this shared drive. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canRename;
  /** Whether the current user can rename this shared drive itself. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canRenameDrive;
  /** Whether the current user can share files or folders in this shared drive. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canShare;
  /** Whether the current user can trash children from folders in this shared drive. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean canTrashChildren;

  /** Whether the current user can add children to folders; {@code null} for none. */
  public java.lang.Boolean getCanAddChildren() {
    return canAddChildren;
  }
  /** Sets whether the current user can add children to folders; {@code null} for none. */
  public Capabilities setCanAddChildren(java.lang.Boolean canAddChildren) {
    this.canAddChildren = canAddChildren;
    return this;
  }
  /** Whether the current user can change the copyRequiresWriterPermission restriction; {@code null} for none. */
  public java.lang.Boolean getCanChangeCopyRequiresWriterPermissionRestriction() {
    return canChangeCopyRequiresWriterPermissionRestriction;
  }
  /** Sets whether the current user can change the copyRequiresWriterPermission restriction; {@code null} for none. */
  public Capabilities setCanChangeCopyRequiresWriterPermissionRestriction(java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction) {
    this.canChangeCopyRequiresWriterPermissionRestriction = canChangeCopyRequiresWriterPermissionRestriction;
    return this;
  }
  /** Whether the current user can change the domainUsersOnly restriction; {@code null} for none. */
  public java.lang.Boolean getCanChangeDomainUsersOnlyRestriction() {
    return canChangeDomainUsersOnlyRestriction;
  }
  /** Sets whether the current user can change the domainUsersOnly restriction; {@code null} for none. */
  public Capabilities setCanChangeDomainUsersOnlyRestriction(java.lang.Boolean canChangeDomainUsersOnlyRestriction) {
    this.canChangeDomainUsersOnlyRestriction = canChangeDomainUsersOnlyRestriction;
    return this;
  }
  /** Whether the current user can change the drive background; {@code null} for none. */
  public java.lang.Boolean getCanChangeDriveBackground() {
    return canChangeDriveBackground;
  }
  /** Sets whether the current user can change the drive background; {@code null} for none. */
  public Capabilities setCanChangeDriveBackground(java.lang.Boolean canChangeDriveBackground) {
    this.canChangeDriveBackground = canChangeDriveBackground;
    return this;
  }
  /** Whether the current user can change the driveMembersOnly restriction; {@code null} for none. */
  public java.lang.Boolean getCanChangeDriveMembersOnlyRestriction() {
    return canChangeDriveMembersOnlyRestriction;
  }
  /** Sets whether the current user can change the driveMembersOnly restriction; {@code null} for none. */
  public Capabilities setCanChangeDriveMembersOnlyRestriction(java.lang.Boolean canChangeDriveMembersOnlyRestriction) {
    this.canChangeDriveMembersOnlyRestriction = canChangeDriveMembersOnlyRestriction;
    return this;
  }
  /** Whether the current user can comment on files; {@code null} for none. */
  public java.lang.Boolean getCanComment() {
    return canComment;
  }
  /** Sets whether the current user can comment on files; {@code null} for none. */
  public Capabilities setCanComment(java.lang.Boolean canComment) {
    this.canComment = canComment;
    return this;
  }
  /** Whether the current user can copy files; {@code null} for none. */
  public java.lang.Boolean getCanCopy() {
    return canCopy;
  }
  /** Sets whether the current user can copy files; {@code null} for none. */
  public Capabilities setCanCopy(java.lang.Boolean canCopy) {
    this.canCopy = canCopy;
    return this;
  }
  /** Whether the current user can delete children from folders; {@code null} for none. */
  public java.lang.Boolean getCanDeleteChildren() {
    return canDeleteChildren;
  }
  /** Sets whether the current user can delete children from folders; {@code null} for none. */
  public Capabilities setCanDeleteChildren(java.lang.Boolean canDeleteChildren) {
    this.canDeleteChildren = canDeleteChildren;
    return this;
  }
  /** Whether the current user can delete this shared drive; {@code null} for none. */
  public java.lang.Boolean getCanDeleteDrive() {
    return canDeleteDrive;
  }
  /** Sets whether the current user can delete this shared drive; {@code null} for none. */
  public Capabilities setCanDeleteDrive(java.lang.Boolean canDeleteDrive) {
    this.canDeleteDrive = canDeleteDrive;
    return this;
  }
  /** Whether the current user can download files; {@code null} for none. */
  public java.lang.Boolean getCanDownload() {
    return canDownload;
  }
  /** Sets whether the current user can download files; {@code null} for none. */
  public Capabilities setCanDownload(java.lang.Boolean canDownload) {
    this.canDownload = canDownload;
    return this;
  }
  /** Whether the current user can edit files; {@code null} for none. */
  public java.lang.Boolean getCanEdit() {
    return canEdit;
  }
  /** Sets whether the current user can edit files; {@code null} for none. */
  public Capabilities setCanEdit(java.lang.Boolean canEdit) {
    this.canEdit = canEdit;
    return this;
  }
  /** Whether the current user can list the children of folders; {@code null} for none. */
  public java.lang.Boolean getCanListChildren() {
    return canListChildren;
  }
  /** Sets whether the current user can list the children of folders; {@code null} for none. */
  public Capabilities setCanListChildren(java.lang.Boolean canListChildren) {
    this.canListChildren = canListChildren;
    return this;
  }
  /** Whether the current user can add/remove members or change their role; {@code null} for none. */
  public java.lang.Boolean getCanManageMembers() {
    return canManageMembers;
  }
  /** Sets whether the current user can add/remove members or change their role; {@code null} for none. */
  public Capabilities setCanManageMembers(java.lang.Boolean canManageMembers) {
    this.canManageMembers = canManageMembers;
    return this;
  }
  /** Whether the current user can read file revisions; {@code null} for none. */
  public java.lang.Boolean getCanReadRevisions() {
    return canReadRevisions;
  }
  /** Sets whether the current user can read file revisions; {@code null} for none. */
  public Capabilities setCanReadRevisions(java.lang.Boolean canReadRevisions) {
    this.canReadRevisions = canReadRevisions;
    return this;
  }
  /** Whether the current user can rename files or folders; {@code null} for none. */
  public java.lang.Boolean getCanRename() {
    return canRename;
  }
  /** Sets whether the current user can rename files or folders; {@code null} for none. */
  public Capabilities setCanRename(java.lang.Boolean canRename) {
    this.canRename = canRename;
    return this;
  }
  /** Whether the current user can rename this shared drive; {@code null} for none. */
  public java.lang.Boolean getCanRenameDrive() {
    return canRenameDrive;
  }
  /** Sets whether the current user can rename this shared drive; {@code null} for none. */
  public Capabilities setCanRenameDrive(java.lang.Boolean canRenameDrive) {
    this.canRenameDrive = canRenameDrive;
    return this;
  }
  /** Whether the current user can share files or folders; {@code null} for none. */
  public java.lang.Boolean getCanShare() {
    return canShare;
  }
  /** Sets whether the current user can share files or folders; {@code null} for none. */
  public Capabilities setCanShare(java.lang.Boolean canShare) {
    this.canShare = canShare;
    return this;
  }
  /** Whether the current user can trash children from folders; {@code null} for none. */
  public java.lang.Boolean getCanTrashChildren() {
    return canTrashChildren;
  }
  /** Sets whether the current user can trash children from folders; {@code null} for none. */
  public Capabilities setCanTrashChildren(java.lang.Boolean canTrashChildren) {
    this.canTrashChildren = canTrashChildren;
    return this;
  }
  /** Dynamic setter inherited from GenericJson; narrows the return type for chaining. */
  @Override
  public Capabilities set(String fieldName, Object value) {
    return (Capabilities) super.set(fieldName, value);
  }
  /** Covariant clone. */
  @Override
  public Capabilities clone() {
    return (Capabilities) super.clone();
  }
}
/**
* A set of restrictions that apply to this shared drive or items inside this shared drive.
*/
public static final class Restrictions extends com.google.api.client.json.GenericJson {
  // Generated wire-format model: field names double as JSON keys via @Key and
  // must not be renamed. {@code null} means "not populated".

  /** Whether admin privileges on this shared drive are required to modify restrictions. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean adminManagedRestrictions;

  /**
   * Whether copy, print, and download should be disabled for readers and commenters on files in
   * this shared drive; when true it overrides the similarly named per-file field. May be
   * {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean copyRequiresWriterPermission;

  /**
   * Whether access to this shared drive and its items is restricted to users of the owning
   * domain; external sharing policies may override this. May be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean domainUsersOnly;

  /** Whether access to items inside this shared drive is restricted to its members. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.Boolean driveMembersOnly;

  /** Returns the adminManagedRestrictions flag, or {@code null} for none. */
  public java.lang.Boolean getAdminManagedRestrictions() {
    return this.adminManagedRestrictions;
  }

  /** Fluent setter for adminManagedRestrictions; {@code null} for none. */
  public Restrictions setAdminManagedRestrictions(java.lang.Boolean adminManagedRestrictions) {
    this.adminManagedRestrictions = adminManagedRestrictions;
    return this;
  }

  /** Returns the copyRequiresWriterPermission flag, or {@code null} for none. */
  public java.lang.Boolean getCopyRequiresWriterPermission() {
    return this.copyRequiresWriterPermission;
  }

  /** Fluent setter for copyRequiresWriterPermission; {@code null} for none. */
  public Restrictions setCopyRequiresWriterPermission(java.lang.Boolean copyRequiresWriterPermission) {
    this.copyRequiresWriterPermission = copyRequiresWriterPermission;
    return this;
  }

  /** Returns the domainUsersOnly flag, or {@code null} for none. */
  public java.lang.Boolean getDomainUsersOnly() {
    return this.domainUsersOnly;
  }

  /** Fluent setter for domainUsersOnly; {@code null} for none. */
  public Restrictions setDomainUsersOnly(java.lang.Boolean domainUsersOnly) {
    this.domainUsersOnly = domainUsersOnly;
    return this;
  }

  /** Returns the driveMembersOnly flag, or {@code null} for none. */
  public java.lang.Boolean getDriveMembersOnly() {
    return this.driveMembersOnly;
  }

  /** Fluent setter for driveMembersOnly; {@code null} for none. */
  public Restrictions setDriveMembersOnly(java.lang.Boolean driveMembersOnly) {
    this.driveMembersOnly = driveMembersOnly;
    return this;
  }

  /** Dynamic setter inherited from GenericJson; narrows the return type for chaining. */
  @Override
  public Restrictions set(String fieldName, Object value) {
    return (Restrictions) super.set(fieldName, value);
  }

  /** Covariant clone. */
  @Override
  public Restrictions clone() {
    return (Restrictions) super.clone();
  }
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.analytics.data.v1beta;
import static com.google.analytics.data.v1beta.BetaAnalyticsDataClient.ListAudienceExportsPagedResponse;
import com.google.analytics.data.v1beta.stub.HttpJsonBetaAnalyticsDataStub;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.testing.MockHttpService;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ApiException;
import com.google.api.gax.rpc.ApiExceptionFactory;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.testing.FakeStatusCode;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.Any;
import com.google.protobuf.Timestamp;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class BetaAnalyticsDataClientHttpJsonTest {
private static MockHttpService mockService;
private static BetaAnalyticsDataClient client;
@BeforeClass
public static void startStaticServer() throws IOException {
  // One mock HTTP transport and one client instance are shared across all
  // tests in this class; each test resets the mock in tearDown().
  mockService =
      new MockHttpService(
          HttpJsonBetaAnalyticsDataStub.getMethodDescriptors(),
          BetaAnalyticsDataSettings.getDefaultEndpoint());
  BetaAnalyticsDataSettings clientSettings =
      BetaAnalyticsDataSettings.newHttpJsonBuilder()
          .setTransportChannelProvider(
              BetaAnalyticsDataSettings.defaultHttpJsonTransportProviderBuilder()
                  .setHttpTransport(mockService)
                  .build())
          // No real credentials are needed against the mock transport.
          .setCredentialsProvider(NoCredentialsProvider.create())
          .build();
  client = BetaAnalyticsDataClient.create(clientSettings);
}
@AfterClass
public static void stopServer() {
  // Release the shared client (and its transport resources) once all tests ran.
  client.close();
}
@Before
public void setUp() {} // No per-test setup needed; fixtures are class-scoped.
@After
public void tearDown() throws Exception {
  // Drop queued responses/exceptions and recorded requests between tests.
  mockService.reset();
}
@Test
public void runReportTest() throws Exception {
  // Canned response the mock transport returns for the next call.
  RunReportResponse expected =
      RunReportResponse.newBuilder()
          .addAllDimensionHeaders(new ArrayList<DimensionHeader>())
          .addAllMetricHeaders(new ArrayList<MetricHeader>())
          .addAllRows(new ArrayList<Row>())
          .addAllTotals(new ArrayList<Row>())
          .addAllMaximums(new ArrayList<Row>())
          .addAllMinimums(new ArrayList<Row>())
          .setRowCount(1340416618)
          .setMetadata(ResponseMetaData.newBuilder().build())
          .setPropertyQuota(PropertyQuota.newBuilder().build())
          .setKind("kind3292052")
          .build();
  mockService.addResponse(expected);

  RunReportRequest req =
      RunReportRequest.newBuilder()
          .setProperty("properties/propertie-2179")
          .addAllDimensions(new ArrayList<Dimension>())
          .addAllMetrics(new ArrayList<Metric>())
          .addAllDateRanges(new ArrayList<DateRange>())
          .setDimensionFilter(FilterExpression.newBuilder().build())
          .setMetricFilter(FilterExpression.newBuilder().build())
          .setOffset(-1019779949)
          .setLimit(102976443)
          .addAllMetricAggregations(new ArrayList<MetricAggregation>())
          .addAllOrderBys(new ArrayList<OrderBy>())
          .setCurrencyCode("currencyCode1004773790")
          .setCohortSpec(CohortSpec.newBuilder().build())
          .setKeepEmptyRows(true)
          .setReturnPropertyQuota(true)
          .addAllComparisons(new ArrayList<Comparison>())
          .build();

  Assert.assertEquals(expected, client.runReport(req));

  // Exactly one HTTP request must have gone out, carrying the standard
  // x-goog-api-client header.
  List<String> paths = mockService.getRequestPaths();
  Assert.assertEquals(1, paths.size());
  String headerValue =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void runReportExceptionTest() throws Exception {
  // Queue an INVALID_ARGUMENT failure; the client must surface it as
  // InvalidArgumentException.
  mockService.addException(
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false));
  RunReportRequest req =
      RunReportRequest.newBuilder()
          .setProperty("properties/propertie-2179")
          .addAllDimensions(new ArrayList<Dimension>())
          .addAllMetrics(new ArrayList<Metric>())
          .addAllDateRanges(new ArrayList<DateRange>())
          .setDimensionFilter(FilterExpression.newBuilder().build())
          .setMetricFilter(FilterExpression.newBuilder().build())
          .setOffset(-1019779949)
          .setLimit(102976443)
          .addAllMetricAggregations(new ArrayList<MetricAggregation>())
          .addAllOrderBys(new ArrayList<OrderBy>())
          .setCurrencyCode("currencyCode1004773790")
          .setCohortSpec(CohortSpec.newBuilder().build())
          .setKeepEmptyRows(true)
          .setReturnPropertyQuota(true)
          .addAllComparisons(new ArrayList<Comparison>())
          .build();
  try {
    client.runReport(req);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
@Test
public void runPivotReportTest() throws Exception {
  // Canned response the mock transport returns for the next call.
  RunPivotReportResponse expected =
      RunPivotReportResponse.newBuilder()
          .addAllPivotHeaders(new ArrayList<PivotHeader>())
          .addAllDimensionHeaders(new ArrayList<DimensionHeader>())
          .addAllMetricHeaders(new ArrayList<MetricHeader>())
          .addAllRows(new ArrayList<Row>())
          .addAllAggregates(new ArrayList<Row>())
          .setMetadata(ResponseMetaData.newBuilder().build())
          .setPropertyQuota(PropertyQuota.newBuilder().build())
          .setKind("kind3292052")
          .build();
  mockService.addResponse(expected);

  RunPivotReportRequest req =
      RunPivotReportRequest.newBuilder()
          .setProperty("properties/propertie-2179")
          .addAllDimensions(new ArrayList<Dimension>())
          .addAllMetrics(new ArrayList<Metric>())
          .addAllDateRanges(new ArrayList<DateRange>())
          .addAllPivots(new ArrayList<Pivot>())
          .setDimensionFilter(FilterExpression.newBuilder().build())
          .setMetricFilter(FilterExpression.newBuilder().build())
          .setCurrencyCode("currencyCode1004773790")
          .setCohortSpec(CohortSpec.newBuilder().build())
          .setKeepEmptyRows(true)
          .setReturnPropertyQuota(true)
          .addAllComparisons(new ArrayList<Comparison>())
          .build();

  Assert.assertEquals(expected, client.runPivotReport(req));

  // One HTTP request with the standard x-goog-api-client header.
  List<String> paths = mockService.getRequestPaths();
  Assert.assertEquals(1, paths.size());
  String headerValue =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void runPivotReportExceptionTest() throws Exception {
  // Queue an INVALID_ARGUMENT failure; expect InvalidArgumentException.
  mockService.addException(
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false));
  RunPivotReportRequest req =
      RunPivotReportRequest.newBuilder()
          .setProperty("properties/propertie-2179")
          .addAllDimensions(new ArrayList<Dimension>())
          .addAllMetrics(new ArrayList<Metric>())
          .addAllDateRanges(new ArrayList<DateRange>())
          .addAllPivots(new ArrayList<Pivot>())
          .setDimensionFilter(FilterExpression.newBuilder().build())
          .setMetricFilter(FilterExpression.newBuilder().build())
          .setCurrencyCode("currencyCode1004773790")
          .setCohortSpec(CohortSpec.newBuilder().build())
          .setKeepEmptyRows(true)
          .setReturnPropertyQuota(true)
          .addAllComparisons(new ArrayList<Comparison>())
          .build();
  try {
    client.runPivotReport(req);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
@Test
public void batchRunReportsTest() throws Exception {
  // Canned response the mock transport returns for the next call.
  BatchRunReportsResponse expected =
      BatchRunReportsResponse.newBuilder()
          .addAllReports(new ArrayList<RunReportResponse>())
          .setKind("kind3292052")
          .build();
  mockService.addResponse(expected);

  BatchRunReportsRequest req =
      BatchRunReportsRequest.newBuilder()
          .setProperty("properties/propertie-2179")
          .addAllRequests(new ArrayList<RunReportRequest>())
          .build();

  Assert.assertEquals(expected, client.batchRunReports(req));

  // One HTTP request with the standard x-goog-api-client header.
  List<String> paths = mockService.getRequestPaths();
  Assert.assertEquals(1, paths.size());
  String headerValue =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void batchRunReportsExceptionTest() throws Exception {
  // Queue an INVALID_ARGUMENT failure; expect InvalidArgumentException.
  mockService.addException(
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false));
  BatchRunReportsRequest req =
      BatchRunReportsRequest.newBuilder()
          .setProperty("properties/propertie-2179")
          .addAllRequests(new ArrayList<RunReportRequest>())
          .build();
  try {
    client.batchRunReports(req);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
@Test
public void batchRunPivotReportsTest() throws Exception {
  // Canned response the mock transport returns for the next call.
  BatchRunPivotReportsResponse expected =
      BatchRunPivotReportsResponse.newBuilder()
          .addAllPivotReports(new ArrayList<RunPivotReportResponse>())
          .setKind("kind3292052")
          .build();
  mockService.addResponse(expected);

  BatchRunPivotReportsRequest req =
      BatchRunPivotReportsRequest.newBuilder()
          .setProperty("properties/propertie-2179")
          .addAllRequests(new ArrayList<RunPivotReportRequest>())
          .build();

  Assert.assertEquals(expected, client.batchRunPivotReports(req));

  // One HTTP request with the standard x-goog-api-client header.
  List<String> paths = mockService.getRequestPaths();
  Assert.assertEquals(1, paths.size());
  String headerValue =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void batchRunPivotReportsExceptionTest() throws Exception {
  // Queue an INVALID_ARGUMENT failure; expect InvalidArgumentException.
  mockService.addException(
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false));
  BatchRunPivotReportsRequest req =
      BatchRunPivotReportsRequest.newBuilder()
          .setProperty("properties/propertie-2179")
          .addAllRequests(new ArrayList<RunPivotReportRequest>())
          .build();
  try {
    client.batchRunPivotReports(req);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
@Test
public void getMetadataTest() throws Exception {
  // Canned response for the typed resource-name overload.
  Metadata expected =
      Metadata.newBuilder()
          .setName(MetadataName.of("[PROPERTY]").toString())
          .addAllDimensions(new ArrayList<DimensionMetadata>())
          .addAllMetrics(new ArrayList<MetricMetadata>())
          .addAllComparisons(new ArrayList<ComparisonMetadata>())
          .build();
  mockService.addResponse(expected);

  Assert.assertEquals(expected, client.getMetadata(MetadataName.of("[PROPERTY]")));

  // One HTTP request with the standard x-goog-api-client header.
  List<String> paths = mockService.getRequestPaths();
  Assert.assertEquals(1, paths.size());
  String headerValue =
      mockService
          .getRequestHeaders()
          .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
          .iterator()
          .next();
  Assert.assertTrue(
      GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void getMetadataExceptionTest() throws Exception {
  // Queue an INVALID_ARGUMENT failure; expect InvalidArgumentException.
  mockService.addException(
      ApiExceptionFactory.createException(
          new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false));
  try {
    client.getMetadata(MetadataName.of("[PROPERTY]"));
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
@Test
public void getMetadataTest2() throws Exception {
Metadata expectedResponse =
Metadata.newBuilder()
.setName(MetadataName.of("[PROPERTY]").toString())
.addAllDimensions(new ArrayList<DimensionMetadata>())
.addAllMetrics(new ArrayList<MetricMetadata>())
.addAllComparisons(new ArrayList<ComparisonMetadata>())
.build();
mockService.addResponse(expectedResponse);
String name = "properties/propertie-8635/metadata";
Metadata actualResponse = client.getMetadata(name);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void getMetadataExceptionTest2() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
String name = "properties/propertie-8635/metadata";
client.getMetadata(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  // Verifies runRealtimeReport echoes the mocked response and sends exactly one
  // correctly-headered request.
  @Test
  public void runRealtimeReportTest() throws Exception {
    RunRealtimeReportResponse expectedResponse =
        RunRealtimeReportResponse.newBuilder()
            .addAllDimensionHeaders(new ArrayList<DimensionHeader>())
            .addAllMetricHeaders(new ArrayList<MetricHeader>())
            .addAllRows(new ArrayList<Row>())
            .addAllTotals(new ArrayList<Row>())
            .addAllMaximums(new ArrayList<Row>())
            .addAllMinimums(new ArrayList<Row>())
            .setRowCount(1340416618)
            .setPropertyQuota(PropertyQuota.newBuilder().build())
            .setKind("kind3292052")
            .build();
    mockService.addResponse(expectedResponse);
    RunRealtimeReportRequest request =
        RunRealtimeReportRequest.newBuilder()
            .setProperty("properties/propertie-2179")
            .addAllDimensions(new ArrayList<Dimension>())
            .addAllMetrics(new ArrayList<Metric>())
            .setDimensionFilter(FilterExpression.newBuilder().build())
            .setMetricFilter(FilterExpression.newBuilder().build())
            .setLimit(102976443)
            .addAllMetricAggregations(new ArrayList<MetricAggregation>())
            .addAllOrderBys(new ArrayList<OrderBy>())
            .setReturnPropertyQuota(true)
            .addAllMinuteRanges(new ArrayList<MinuteRange>())
            .build();
    RunRealtimeReportResponse actualResponse = client.runRealtimeReport(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Verifies runRealtimeReport maps a mocked INVALID_ARGUMENT to InvalidArgumentException.
  @Test
  public void runRealtimeReportExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      RunRealtimeReportRequest request =
          RunRealtimeReportRequest.newBuilder()
              .setProperty("properties/propertie-2179")
              .addAllDimensions(new ArrayList<Dimension>())
              .addAllMetrics(new ArrayList<Metric>())
              .setDimensionFilter(FilterExpression.newBuilder().build())
              .setMetricFilter(FilterExpression.newBuilder().build())
              .setLimit(102976443)
              .addAllMetricAggregations(new ArrayList<MetricAggregation>())
              .addAllOrderBys(new ArrayList<OrderBy>())
              .setReturnPropertyQuota(true)
              .addAllMinuteRanges(new ArrayList<MinuteRange>())
              .build();
      client.runRealtimeReport(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Verifies checkCompatibility echoes the mocked response and sends exactly one
  // correctly-headered request.
  @Test
  public void checkCompatibilityTest() throws Exception {
    CheckCompatibilityResponse expectedResponse =
        CheckCompatibilityResponse.newBuilder()
            .addAllDimensionCompatibilities(new ArrayList<DimensionCompatibility>())
            .addAllMetricCompatibilities(new ArrayList<MetricCompatibility>())
            .build();
    mockService.addResponse(expectedResponse);
    CheckCompatibilityRequest request =
        CheckCompatibilityRequest.newBuilder()
            .setProperty("properties/propertie-2179")
            .addAllDimensions(new ArrayList<Dimension>())
            .addAllMetrics(new ArrayList<Metric>())
            .setDimensionFilter(FilterExpression.newBuilder().build())
            .setMetricFilter(FilterExpression.newBuilder().build())
            .setCompatibilityFilter(Compatibility.forNumber(0))
            .build();
    CheckCompatibilityResponse actualResponse = client.checkCompatibility(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Verifies checkCompatibility maps a mocked INVALID_ARGUMENT to InvalidArgumentException.
  @Test
  public void checkCompatibilityExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      CheckCompatibilityRequest request =
          CheckCompatibilityRequest.newBuilder()
              .setProperty("properties/propertie-2179")
              .addAllDimensions(new ArrayList<Dimension>())
              .addAllMetrics(new ArrayList<Metric>())
              .setDimensionFilter(FilterExpression.newBuilder().build())
              .setMetricFilter(FilterExpression.newBuilder().build())
              .setCompatibilityFilter(Compatibility.forNumber(0))
              .build();
      client.checkCompatibility(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Verifies the long-running createAudienceExportAsync(PropertyName, AudienceExport):
  // the mocked Operation resolves to the expected AudienceExport and one request is sent.
  @Test
  public void createAudienceExportTest() throws Exception {
    AudienceExport expectedResponse =
        AudienceExport.newBuilder()
            .setName(AudienceExportName.of("[PROPERTY]", "[AUDIENCE_EXPORT]").toString())
            .setAudience("audience975628804")
            .setAudienceDisplayName("audienceDisplayName1537141193")
            .addAllDimensions(new ArrayList<AudienceDimension>())
            .setBeginCreatingTime(Timestamp.newBuilder().build())
            .setCreationQuotaTokensCharged(1232901266)
            .setRowCount(1340416618)
            .setErrorMessage("errorMessage1203236063")
            .setPercentageCompleted(-1292047642)
            .build();
    // Completed LRO whose payload is the expected export.
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createAudienceExportTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockService.addResponse(resultOperation);
    PropertyName parent = PropertyName.of("[PROPERTY]");
    AudienceExport audienceExport = AudienceExport.newBuilder().build();
    AudienceExport actualResponse = client.createAudienceExportAsync(parent, audienceExport).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Error path: the async future fails, so .get() wraps the ApiException in ExecutionException.
  @Test
  public void createAudienceExportExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      PropertyName parent = PropertyName.of("[PROPERTY]");
      AudienceExport audienceExport = AudienceExport.newBuilder().build();
      client.createAudienceExportAsync(parent, audienceExport).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      // Expected: the mocked ApiException surfaces as the ExecutionException's cause.
    }
  }
  // Same as createAudienceExportTest, but through the String-parent overload.
  @Test
  public void createAudienceExportTest2() throws Exception {
    AudienceExport expectedResponse =
        AudienceExport.newBuilder()
            .setName(AudienceExportName.of("[PROPERTY]", "[AUDIENCE_EXPORT]").toString())
            .setAudience("audience975628804")
            .setAudienceDisplayName("audienceDisplayName1537141193")
            .addAllDimensions(new ArrayList<AudienceDimension>())
            .setBeginCreatingTime(Timestamp.newBuilder().build())
            .setCreationQuotaTokensCharged(1232901266)
            .setRowCount(1340416618)
            .setErrorMessage("errorMessage1203236063")
            .setPercentageCompleted(-1292047642)
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createAudienceExportTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockService.addResponse(resultOperation);
    String parent = "properties/propertie-2024";
    AudienceExport audienceExport = AudienceExport.newBuilder().build();
    AudienceExport actualResponse = client.createAudienceExportAsync(parent, audienceExport).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Error-path counterpart of createAudienceExportTest2 (String-parent overload).
  @Test
  public void createAudienceExportExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String parent = "properties/propertie-2024";
      AudienceExport audienceExport = AudienceExport.newBuilder().build();
      client.createAudienceExportAsync(parent, audienceExport).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      // Expected: the mocked ApiException surfaces as the ExecutionException's cause.
    }
  }
  // Verifies queryAudienceExport(String) echoes the mocked response and sends exactly one
  // correctly-headered request.
  @Test
  public void queryAudienceExportTest() throws Exception {
    QueryAudienceExportResponse expectedResponse =
        QueryAudienceExportResponse.newBuilder()
            .setAudienceExport(AudienceExport.newBuilder().build())
            .addAllAudienceRows(new ArrayList<AudienceRow>())
            .setRowCount(1340416618)
            .build();
    mockService.addResponse(expectedResponse);
    String name = "properties/propertie-1136/audienceExports/audienceExport-1136";
    QueryAudienceExportResponse actualResponse = client.queryAudienceExport(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Verifies queryAudienceExport maps a mocked INVALID_ARGUMENT to InvalidArgumentException.
  @Test
  public void queryAudienceExportExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String name = "properties/propertie-1136/audienceExports/audienceExport-1136";
      client.queryAudienceExport(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Verifies getAudienceExport(AudienceExportName) echoes the mocked response and sends
  // exactly one correctly-headered request.
  @Test
  public void getAudienceExportTest() throws Exception {
    AudienceExport expectedResponse =
        AudienceExport.newBuilder()
            .setName(AudienceExportName.of("[PROPERTY]", "[AUDIENCE_EXPORT]").toString())
            .setAudience("audience975628804")
            .setAudienceDisplayName("audienceDisplayName1537141193")
            .addAllDimensions(new ArrayList<AudienceDimension>())
            .setBeginCreatingTime(Timestamp.newBuilder().build())
            .setCreationQuotaTokensCharged(1232901266)
            .setRowCount(1340416618)
            .setErrorMessage("errorMessage1203236063")
            .setPercentageCompleted(-1292047642)
            .build();
    mockService.addResponse(expectedResponse);
    AudienceExportName name = AudienceExportName.of("[PROPERTY]", "[AUDIENCE_EXPORT]");
    AudienceExport actualResponse = client.getAudienceExport(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Verifies getAudienceExport(AudienceExportName) maps INVALID_ARGUMENT to InvalidArgumentException.
  @Test
  public void getAudienceExportExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      AudienceExportName name = AudienceExportName.of("[PROPERTY]", "[AUDIENCE_EXPORT]");
      client.getAudienceExport(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Same as getAudienceExportTest, but through the String-name overload.
  @Test
  public void getAudienceExportTest2() throws Exception {
    AudienceExport expectedResponse =
        AudienceExport.newBuilder()
            .setName(AudienceExportName.of("[PROPERTY]", "[AUDIENCE_EXPORT]").toString())
            .setAudience("audience975628804")
            .setAudienceDisplayName("audienceDisplayName1537141193")
            .addAllDimensions(new ArrayList<AudienceDimension>())
            .setBeginCreatingTime(Timestamp.newBuilder().build())
            .setCreationQuotaTokensCharged(1232901266)
            .setRowCount(1340416618)
            .setErrorMessage("errorMessage1203236063")
            .setPercentageCompleted(-1292047642)
            .build();
    mockService.addResponse(expectedResponse);
    String name = "properties/propertie-1136/audienceExports/audienceExport-1136";
    AudienceExport actualResponse = client.getAudienceExport(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Error-path counterpart of getAudienceExportTest2 (String-name overload).
  @Test
  public void getAudienceExportExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String name = "properties/propertie-1136/audienceExports/audienceExport-1136";
      client.getAudienceExport(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Verifies listAudienceExports(PropertyName) pagination: a single-element page with an
  // empty next-page token yields exactly that element via iterateAll().
  @Test
  public void listAudienceExportsTest() throws Exception {
    AudienceExport responsesElement = AudienceExport.newBuilder().build();
    ListAudienceExportsResponse expectedResponse =
        ListAudienceExportsResponse.newBuilder()
            .setNextPageToken("")
            .addAllAudienceExports(Arrays.asList(responsesElement))
            .build();
    mockService.addResponse(expectedResponse);
    PropertyName parent = PropertyName.of("[PROPERTY]");
    ListAudienceExportsPagedResponse pagedListResponse = client.listAudienceExports(parent);
    List<AudienceExport> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getAudienceExportsList().get(0), resources.get(0));
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Verifies listAudienceExports(PropertyName) maps INVALID_ARGUMENT to InvalidArgumentException.
  @Test
  public void listAudienceExportsExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      PropertyName parent = PropertyName.of("[PROPERTY]");
      client.listAudienceExports(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Same as listAudienceExportsTest, but through the String-parent overload.
  @Test
  public void listAudienceExportsTest2() throws Exception {
    AudienceExport responsesElement = AudienceExport.newBuilder().build();
    ListAudienceExportsResponse expectedResponse =
        ListAudienceExportsResponse.newBuilder()
            .setNextPageToken("")
            .addAllAudienceExports(Arrays.asList(responsesElement))
            .build();
    mockService.addResponse(expectedResponse);
    String parent = "properties/propertie-2024";
    ListAudienceExportsPagedResponse pagedListResponse = client.listAudienceExports(parent);
    List<AudienceExport> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getAudienceExportsList().get(0), resources.get(0));
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Error-path counterpart of listAudienceExportsTest2 (String-parent overload).
  @Test
  public void listAudienceExportsExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String parent = "properties/propertie-2024";
      client.listAudienceExports(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
}
|
// ==== file boundary (dataset residue): oracle/graal — compiler/src/jdk.graal.compiler/src/jdk/graal/compiler/graphio/GraphProtocol.java ====
* Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.graal.compiler.graphio;
import java.io.Closeable;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.WeakHashMap;
abstract class GraphProtocol<Graph, Node, NodeClass, Edges, Block, ResolvedJavaMethod, ResolvedJavaField, Signature, NodeSourcePosition, Location> implements Closeable {
private static final Charset UTF8 = Charset.forName("UTF-8");
private static final int CONSTANT_POOL_MAX_SIZE = 8000;
private static final int BEGIN_GROUP = 0x00;
private static final int BEGIN_GRAPH = 0x01;
private static final int CLOSE_GROUP = 0x02;
private static final int BEGIN_DOCUMENT = 0x03;
private static final int POOL_NEW = 0x00;
private static final int POOL_STRING = 0x01;
private static final int POOL_ENUM = 0x02;
private static final int POOL_CLASS = 0x03;
private static final int POOL_METHOD = 0x04;
private static final int POOL_NULL = 0x05;
private static final int POOL_NODE_CLASS = 0x06;
private static final int POOL_FIELD = 0x07;
private static final int POOL_SIGNATURE = 0x08;
private static final int POOL_NODE_SOURCE_POSITION = 0x09;
private static final int POOL_NODE = 0x0a;
private static final int PROPERTY_POOL = 0x00;
private static final int PROPERTY_INT = 0x01;
private static final int PROPERTY_LONG = 0x02;
private static final int PROPERTY_DOUBLE = 0x03;
private static final int PROPERTY_FLOAT = 0x04;
private static final int PROPERTY_TRUE = 0x05;
private static final int PROPERTY_FALSE = 0x06;
private static final int PROPERTY_ARRAY = 0x07;
private static final int PROPERTY_SUBGRAPH = 0x08;
private static final int KLASS = 0x00;
private static final int ENUM_KLASS = 0x01;
private static final byte[] MAGIC_BYTES = {'B', 'I', 'G', 'V'};
private static final int MAJOR_VERSION = 8;
private static final int MINOR_VERSION = 0;
private final ConstantPool constantPool;
private final ByteBuffer buffer;
private final WritableByteChannel channel;
private final boolean embedded;
final int versionMajor;
final int versionMinor;
private boolean printing;
    /**
     * Creates a protocol writer for the given channel.
     *
     * @param channel sink for the binary graph stream
     * @param major requested major format version; must not be newer than {@link #MAJOR_VERSION}
     * @param minor requested minor format version
     * @param embedded when {@code true} the stream is embedded inside another stream: the
     *            magic-bytes/version header is suppressed and the constant pool is reset on
     *            every flush (see {@link #flushEmbedded()})
     * @throws IllegalArgumentException if the requested version is newer than supported
     */
    GraphProtocol(WritableByteChannel channel, int major, int minor, boolean embedded) throws IOException {
        if (major > MAJOR_VERSION || (major == MAJOR_VERSION && minor > MINOR_VERSION)) {
            throw new IllegalArgumentException("Unrecognized version " + major + "." + minor);
        }
        this.versionMajor = major;
        this.versionMinor = minor;
        this.constantPool = new ConstantPool();
        // 256 KiB direct staging buffer; every write goes through it before the channel.
        this.buffer = ByteBuffer.allocateDirect(256 * 1024);
        this.channel = channel;
        this.embedded = embedded;
        if (!embedded) {
            writeVersion();
            flushEmbedded();
        }
    }
    /**
     * Creates a writer that shares the parent's channel, staging buffer and constant pool,
     * so nested/auxiliary output interleaves correctly with the parent's stream.
     */
    GraphProtocol(GraphProtocol<?, ?, ?, ?, ?, ?, ?, ?, ?, ?> parent) {
        this.versionMajor = parent.versionMajor;
        this.versionMinor = parent.versionMinor;
        this.constantPool = parent.constantPool;
        this.buffer = parent.buffer;
        this.channel = parent.channel;
        this.embedded = parent.embedded;
    }
    /**
     * Writes one graph to the stream: a BEGIN_GRAPH tag, the graph title (format v3+ sends
     * id/format/args so the viewer can format the title itself; older versions send a
     * pre-formatted pool string), then the graph body. The {@code printing} flag guards
     * against interleaved raw writes via {@link #write(ByteBuffer)}.
     */
    @SuppressWarnings("all")
    public final void print(Graph graph, Map<? extends Object, ? extends Object> properties, int id, String format, Object... args) throws IOException {
        printing = true;
        try {
            writeByte(BEGIN_GRAPH);
            if (versionMajor >= 3) {
                writeInt(id);
                writeString(format);
                writeInt(args.length);
                for (Object a : args) {
                    writePropertyObject(graph, a);
                }
            } else {
                // Legacy (< v3): the title is rendered here and sent as a single pool string.
                writePoolObject(formatTitle(graph, id, format, args));
            }
            writeGraph(graph, properties);
            flushEmbedded();
            flush();
        } finally {
            printing = false;
        }
    }
    /**
     * Writes document-level properties at the start of the dump.
     *
     * @throws IllegalStateException if the negotiated format version is older than 7,
     *             which has no BEGIN_DOCUMENT record
     */
    public final void startDocument(Map<? extends Object, ? extends Object> documentProperties) throws IOException {
        if (versionMajor < 7) {
            throw new IllegalStateException("Dump properties unsupported in format v." + versionMajor);
        }
        printing = true;
        try {
            writeByte(BEGIN_DOCUMENT);
            writeProperties(null, documentProperties);
        } finally {
            printing = false;
        }
    }
    /**
     * Opens a named group of graphs (e.g. one compilation). Must be balanced by
     * {@link #endGroup()}.
     *
     * @param noGraph context graph used only for property resolution; may be {@code null}
     * @param method the method this group belongs to, or {@code null}
     * @param bci bytecode index associated with the group
     */
    public final void beginGroup(Graph noGraph, String name, String shortName, ResolvedJavaMethod method, int bci, Map<? extends Object, ? extends Object> properties) throws IOException {
        printing = true;
        try {
            writeByte(BEGIN_GROUP);
            writePoolObject(name);
            writePoolObject(shortName);
            writePoolObject(method);
            writeInt(bci);
            writeProperties(noGraph, properties);
            flushEmbedded();
        } finally {
            printing = false;
        }
    }
    /** Closes the group most recently opened by {@link #beginGroup}. */
    public final void endGroup() throws IOException {
        printing = true;
        try {
            writeByte(CLOSE_GROUP);
            flushEmbedded();
        } finally {
            printing = false;
        }
    }
    /**
     * Writes raw bytes to the underlying stream, bypassing the graph protocol. Not allowed
     * while a graph is being printed. Resets the constant pool because the raw data may
     * invalidate previously assigned pool ids on the reader side.
     *
     * @return the number of bytes written
     */
    final int write(ByteBuffer src) throws IOException {
        if (printing) {
            throw new IllegalStateException("Trying to write during graph print.");
        }
        constantPool.reset();
        return writeBytesRaw(src);
    }
    /** Returns whether the underlying channel is still open. */
    final boolean isOpen() {
        return channel.isOpen();
    }
    /**
     * Flushes buffered data and closes the channel. An {@link IOException} is deliberately
     * rethrown as {@link Error} so this {@code close()} needs no checked-exception clause;
     * callers do not expect dump failures to be recoverable.
     */
    @Override
    public final void close() {
        try {
            flush();
            channel.close();
        } catch (IOException ex) {
            throw new Error(ex);
        }
    }
    // ---- SPI: graph/method/node resolution. Subclasses adapt a concrete graph model
    // (e.g. Graal IR, Truffle AST) to this format-agnostic writer. "findX(Object)" methods
    // return null (or a sentinel per their contract) when the object is not an X.
    protected abstract Graph findGraph(Graph current, Object obj);
    protected abstract ResolvedJavaMethod findMethod(Object obj);
    /**
     * Attempts to recognize the provided object as a node. Used to encode it with
     * {@link #POOL_NODE} pool type.
     *
     * @param obj any object
     * @return <code>null</code> if it is not a node object, non-null otherwise
     */
    protected abstract Node findNode(Object obj);
    /**
     * Determines whether the provided object is node class or not.
     *
     * @param obj object to check
     * @return {@code null} if {@code obj} does not represent a NodeClass otherwise the NodeClass
     *         represented by {@code obj}
     */
    protected abstract NodeClass findNodeClass(Object obj);
    /**
     * Returns the NodeClass for a given Node {@code obj}.
     *
     * @param obj instance of node
     * @return non-{@code null} instance of the node's class object
     */
    protected abstract NodeClass findClassForNode(Node obj);
    /**
     * Find a Java class. The returned object must be acceptable by
     * {@link #findJavaTypeName(java.lang.Object)} and return valid name for the class.
     *
     * @param clazz node class object
     * @return object representing the class, for example {@link Class}
     */
    protected abstract Object findJavaClass(NodeClass clazz);
    protected abstract Object findEnumClass(Object enumValue);
    protected abstract String findNameTemplate(NodeClass clazz);
    protected abstract Edges findClassEdges(NodeClass nodeClass, boolean dumpInputs);
    protected abstract int findNodeId(Node n);
    protected abstract boolean hasPredecessor(Node node);
    protected abstract int findNodesCount(Graph info);
    protected abstract Iterable<? extends Node> findNodes(Graph info);
    protected abstract void findNodeProperties(Node node, Map<String, Object> props, Graph info);
    // ---- SPI: basic-block (control-flow) structure of a graph.
    protected abstract Collection<? extends Node> findBlockNodes(Graph info, Block block);
    protected abstract int findBlockId(Block sux);
    protected abstract Collection<? extends Block> findBlocks(Graph graph);
    protected abstract Collection<? extends Block> findBlockSuccessors(Block block);
    protected abstract String formatTitle(Graph graph, int id, String format, Object... args);
    // ---- SPI: edge metadata; 'i' indexes the edge slot within an Edges descriptor.
    protected abstract int findSize(Edges edges);
    protected abstract boolean isDirect(Edges edges, int i);
    protected abstract String findName(Edges edges, int i);
    protected abstract Object findType(Edges edges, int i);
    protected abstract Collection<? extends Node> findNodes(Graph graph, Node node, Edges edges, int i);
    // ---- SPI: reflective metadata for enums, types, methods, fields and signatures.
    protected abstract int findEnumOrdinal(Object obj);
    protected abstract String[] findEnumTypeValues(Object clazz);
    protected abstract String findJavaTypeName(Object obj);
    protected abstract byte[] findMethodCode(ResolvedJavaMethod method);
    protected abstract int findMethodModifiers(ResolvedJavaMethod method);
    protected abstract Signature findMethodSignature(ResolvedJavaMethod method);
    protected abstract String findMethodName(ResolvedJavaMethod method);
    protected abstract Object findMethodDeclaringClass(ResolvedJavaMethod method);
    protected abstract int findFieldModifiers(ResolvedJavaField field);
    protected abstract String findFieldTypeName(ResolvedJavaField field);
    protected abstract String findFieldName(ResolvedJavaField field);
    protected abstract Object findFieldDeclaringClass(ResolvedJavaField field);
    protected abstract ResolvedJavaField findJavaField(Object object);
    protected abstract Signature findSignature(Object object);
    protected abstract int findSignatureParameterCount(Signature signature);
    protected abstract String findSignatureParameterTypeName(Signature signature, int index);
    protected abstract String findSignatureReturnTypeName(Signature signature);
    // ---- SPI: node source positions and source locations (format v4+).
    protected abstract NodeSourcePosition findNodeSourcePosition(Object object);
    protected abstract ResolvedJavaMethod findNodeSourcePositionMethod(NodeSourcePosition pos);
    protected abstract NodeSourcePosition findNodeSourcePositionCaller(NodeSourcePosition pos);
    protected abstract int findNodeSourcePositionBCI(NodeSourcePosition pos);
    protected abstract Iterable<Location> findLocation(ResolvedJavaMethod method, int bci, NodeSourcePosition pos);
    protected abstract String findLocationFile(Location loc) throws IOException;
    protected abstract int findLocationLine(Location loc);
    protected abstract URI findLocationURI(Location loc) throws URISyntaxException;
    protected abstract String findLocationLanguage(Location loc);
    protected abstract int findLocationStart(Location loc);
    protected abstract int findLocationEnd(Location loc);
    /** Writes the stream header: magic bytes "BIGV" followed by major/minor version bytes. */
    private void writeVersion() throws IOException {
        writeBytesRaw(MAGIC_BYTES);
        writeByte(versionMajor);
        writeByte(versionMinor);
    }
    /**
     * In embedded mode, flushes after every logical record and resets the constant pool,
     * since the embedding stream may interleave other data between records. No-op otherwise.
     */
    private void flushEmbedded() throws IOException {
        if (embedded) {
            flush();
            constantPool.reset();
        }
    }
    /**
     * Drains the staging buffer to the channel, then compacts it so writing can continue.
     */
    private void flush() throws IOException {
        buffer.flip();
        /*
         * Try not to let interrupted threads abort the write. There's still a race here but an
         * interrupt that's been pending for a long time shouldn't stop this writing.
         */
        boolean interrupted = Thread.interrupted();
        try {
            channel.write(buffer);
        } finally {
            if (interrupted) {
                // Restore the interrupt status cleared by Thread.interrupted() above.
                Thread.currentThread().interrupt();
            }
        }
        buffer.compact();
    }
    /**
     * Flushes until at least {@code i} bytes are free in the staging buffer.
     * Precondition: {@code i} must not exceed the buffer capacity, otherwise this loop
     * could never terminate (asserted below); callers writing larger payloads must chunk.
     */
    private void ensureAvailable(int i) throws IOException {
        assert buffer.capacity() >= i : "Can not make " + i + " bytes available, buffer is too small";
        while (buffer.remaining() < i) {
            flush();
        }
    }
    // ---- Fixed-width primitive writers. Each reserves space first, then appends to the
    // staging buffer in the buffer's (big-endian by default) byte order.
    private void writeByte(int b) throws IOException {
        ensureAvailable(1);
        buffer.put((byte) b);
    }
    private void writeInt(int b) throws IOException {
        ensureAvailable(4);
        buffer.putInt(b);
    }
    private void writeLong(long b) throws IOException {
        ensureAvailable(8);
        buffer.putLong(b);
    }
    private void writeDouble(double b) throws IOException {
        ensureAvailable(8);
        buffer.putDouble(b);
    }
    private void writeFloat(float b) throws IOException {
        ensureAvailable(4);
        buffer.putFloat(b);
    }
    // Despite the name, this writes a 2-byte char (used for constant-pool ids and list sizes).
    private void writeShort(char b) throws IOException {
        ensureAvailable(2);
        buffer.putChar(b);
    }
    /** Writes a string as UTF-8 bytes with a 4-byte length prefix. */
    private void writeString(String str) throws IOException {
        byte[] bytes = str.getBytes(UTF8);
        writeBytes(bytes);
    }
    /** Writes a 4-byte length prefix (-1 encodes null) followed by the raw bytes. */
    private void writeBytes(byte[] b) throws IOException {
        if (b == null) {
            writeInt(-1);
        } else {
            writeInt(b.length);
            writeBytesRaw(b);
        }
    }
private void writeBytesRaw(byte[] b) throws IOException {
int bytesWritten = 0;
while (bytesWritten < b.length) {
int toWrite = Math.min(b.length - bytesWritten, buffer.capacity());
ensureAvailable(toWrite);
buffer.put(b, bytesWritten, toWrite);
bytesWritten += toWrite;
}
}
    /**
     * Copies the remaining bytes of {@code b} into the staging buffer in capacity-sized
     * chunks. The source's limit is temporarily lowered so {@code buffer.put(b)} transfers
     * exactly one chunk, and restored in the finally block even if a flush fails.
     *
     * @return the number of bytes written (b's remaining count on entry)
     */
    private int writeBytesRaw(ByteBuffer b) throws IOException {
        int limit = b.limit();
        int written = 0;
        while (b.position() < limit) {
            int toWrite = Math.min(limit - b.position(), buffer.capacity());
            ensureAvailable(toWrite);
            b.limit(b.position() + toWrite);
            try {
                buffer.put(b);
                written += toWrite;
            } finally {
                b.limit(limit);
            }
        }
        return written;
    }
private void writeInts(int[] b) throws IOException {
if (b == null) {
writeInt(-1);
} else {
writeInt(b.length);
int sizeInBytes = b.length * 4;
ensureAvailable(sizeInBytes);
buffer.asIntBuffer().put(b);
buffer.position(buffer.position() + sizeInBytes);
}
}
private void writeDoubles(double[] b) throws IOException {
if (b == null) {
writeInt(-1);
} else {
writeInt(b.length);
int sizeInBytes = b.length * 8;
ensureAvailable(sizeInBytes);
buffer.asDoubleBuffer().put(b);
buffer.position(buffer.position() + sizeInBytes);
}
}
    /**
     * Writes a reference to a constant-pool entry: POOL_NULL for null; for a known object,
     * its pool type byte plus 2-byte pool id; for an unseen object, delegates to
     * addPoolEntry which defines the entry inline.
     */
    private void writePoolObject(Object obj) throws IOException {
        Object object = obj;
        if (object == null) {
            writeByte(POOL_NULL);
            return;
        }
        // found[0] receives the canonical representation discovered during type detection.
        Object[] found = new Object[1];
        int type = findPoolType(object, found);
        Character id = constantPool.get(object, type);
        if (id == null) {
            // First occurrence: emit a full pool definition instead of a reference.
            addPoolEntry(object, type, found);
        } else {
            writeByte(type);
            writeShort(id.charValue());
        }
    }
    /**
     * Classifies an object into one of the POOL_* categories and, where applicable, stores
     * its canonical representation in {@code found[0]}.
     *
     * The order of the checks is significant: field/signature/source-position are probed
     * first, then node-related kinds, then methods, enums and classes, with POOL_STRING as
     * the final fallback. Version quirks: source positions need v4+, node references need
     * v5+, and v4 exactly encodes a node as its node class instead.
     */
    private int findPoolType(Object obj, Object[] found) throws IOException {
        Object object = obj;
        if (object == null) {
            return POOL_NULL;
        }
        if (isFound(findJavaField(object), found)) {
            return POOL_FIELD;
        } else if (isFound(findSignature(object), found)) {
            return POOL_SIGNATURE;
        } else if (versionMajor >= 4 && isFound(findNodeSourcePosition(object), found)) {
            return POOL_NODE_SOURCE_POSITION;
        } else {
            final Node node = findNode(object);
            if (versionMajor == 4 && node != null) {
                // v4 has no POOL_NODE: represent the node by its node class instead.
                object = classForNode(node);
            }
            if (isFound(findNodeClass(object), found)) {
                return POOL_NODE_CLASS;
            } else if (versionMajor >= 5 && isFound(node, found)) {
                return POOL_NODE;
            } else if (isFound(findMethod(object), found)) {
                return POOL_METHOD;
            } else if (object instanceof Enum<?>) {
                if (found != null) {
                    found[0] = ((Enum<?>) object).ordinal();
                }
                return POOL_ENUM;
            } else {
                // Non-java.lang.Enum enum-likes are recognized via the SPI hook.
                int val = findEnumOrdinal(object);
                if (val >= 0) {
                    if (found != null) {
                        found[0] = val;
                    }
                    return POOL_ENUM;
                } else if (object instanceof Class<?>) {
                    if (found != null) {
                        found[0] = ((Class<?>) object).getName();
                    }
                    return POOL_CLASS;
                } else if (isFound(findJavaTypeName(object), found)) {
                    return POOL_CLASS;
                } else {
                    // Fallback: anything else is pooled via its string form.
                    return POOL_STRING;
                }
            }
        }
    }
/**
 * Serializes a whole graph: its properties first, then all nodes, then the
 * basic-block information.
 */
private void writeGraph(Graph graph, Map<? extends Object, ? extends Object> properties) throws IOException {
    writeProperties(graph, properties);
    writeNodes(graph);
    writeBlocks(findBlocks(graph), graph);
}
/**
 * Writes all nodes of {@code info}: the node count followed by, per node, its
 * id, node class, predecessor flag, properties, inputs and successors.
 *
 * @throws IOException if the advertised node count does not match the number
 *             of nodes actually iterated
 */
private void writeNodes(Graph info) throws IOException {
    // Reused for every node and cleared afterwards to avoid reallocation.
    Map<String, Object> props = new LinkedHashMap<>();
    final int size = findNodesCount(info);
    writeInt(size);
    int cnt = 0;
    for (Node node : findNodes(info)) {
        NodeClass nodeClass = classForNode(node);
        findNodeProperties(node, props, info);
        writeInt(findNodeId(node));
        writePoolObject(nodeClass);
        writeByte(hasPredecessor(node) ? 1 : 0);
        writeProperties(info, props);
        writeEdges(info, node, true);
        writeEdges(info, node, false);
        props.clear();
        cnt++;
    }
    // The count was written up front, so a mismatch corrupts the stream.
    if (size != cnt) {
        throw new IOException("Expecting " + size + " nodes, but found " + cnt);
    }
}
/**
 * Writes one edge list (inputs when {@code dumpInputs}, successors otherwise)
 * of {@code node}. A direct edge is encoded as a single node reference; an
 * indirect edge as a char-length-prefixed list of references.
 */
private void writeEdges(Graph graph, Node node, boolean dumpInputs) throws IOException {
    NodeClass clazz = classForNode(node);
    Edges edges = findClassEdges(clazz, dumpInputs);
    int size = findSize(edges);
    for (int i = 0; i < size; i++) {
        Collection<? extends Node> list = findNodes(graph, node, edges, i);
        if (isDirect(edges, i)) {
            // A direct edge must resolve to exactly one node (or none).
            if (list != null && list.size() != 1) {
                throw new IOException("Edge " + i + " in " + edges + " is direct, but list isn't singleton: " + list);
            }
            Node n = null;
            if (list != null && !list.isEmpty()) {
                n = list.iterator().next();
            }
            writeNodeRef(n);
        } else {
            if (list == null) {
                writeShort((char) 0);
            } else {
                int listSize = list.size();
                // The wire format stores list sizes as 16-bit chars.
                if (listSize != ((char) listSize)) {
                    throw new IOException("Too many nodes in list: " + list.size());
                }
                writeShort((char) listSize);
                for (Node edge : list) {
                    writeNodeRef(edge);
                }
            }
        }
    }
}
/**
 * Looks up the {@link NodeClass} describing {@code node}, failing if none is
 * registered for it.
 */
private NodeClass classForNode(Node node) throws IOException {
    final NodeClass nodeClass = findClassForNode(node);
    if (nodeClass != null) {
        return nodeClass;
    }
    throw new IOException("No class for " + node);
}
/** Writes a reference to {@code node} as its int id. */
private void writeNodeRef(Node node) throws IOException {
    writeInt(findNodeId(node));
}
/**
 * Writes the basic blocks of {@code info}. If any block has no node list, an
 * empty block section (count 0) is written instead of partial data.
 */
private void writeBlocks(Collection<? extends Block> blocks, Graph info) throws IOException {
    if (blocks != null) {
        // Pre-scan: bail out with an empty block section if any block lacks nodes,
        // so that the emitted block information is either complete or absent.
        for (Block block : blocks) {
            Collection<? extends Node> nodes = findBlockNodes(info, block);
            if (nodes == null) {
                writeInt(0);
                return;
            }
        }
        writeInt(blocks.size());
        for (Block block : blocks) {
            Collection<? extends Node> nodes = findBlockNodes(info, block);
            writeInt(findBlockId(block));
            // Member node ids, length-prefixed.
            writeInt(nodes.size());
            for (Node node : nodes) {
                writeInt(findNodeId(node));
            }
            // Successor block ids, length-prefixed.
            final Collection<? extends Block> successors = findBlockSuccessors(block);
            writeInt(successors.size());
            for (Block sux : successors) {
                writeInt(findBlockId(sux));
            }
        }
    } else {
        writeInt(0);
    }
}
/**
 * Serializes the edge metadata of one edge list of {@code nodeClass}: per
 * edge a directness flag and a name, plus a type for input edges.
 */
private void writeEdgesInfo(NodeClass nodeClass, boolean dumpInputs) throws IOException {
    final Edges edges = findClassEdges(nodeClass, dumpInputs);
    final int count = findSize(edges);
    writeShort((char) count);
    for (int index = 0; index < count; index++) {
        // 0 marks a direct edge, 1 an indirect (list) edge.
        if (isDirect(edges, index)) {
            writeByte(0);
        } else {
            writeByte(1);
        }
        writePoolObject(findName(edges, index));
        if (dumpInputs) {
            writePoolObject(findType(edges, index));
        }
    }
}
/**
 * Registers {@code obj} in the constant pool under a fresh id and writes the
 * full pool-entry definition. The payload layout depends on {@code type} (the
 * value {@link #findPoolType} returned, with its representative in
 * {@code found[0]}) and, for several types, on the protocol version.
 */
@SuppressWarnings("unchecked")
private void addPoolEntry(Object obj, int type, Object[] found) throws IOException {
    Object object = obj;
    char index = constantPool.add(object, type);
    // Every new entry starts with POOL_NEW, its pool id, and its type tag.
    writeByte(POOL_NEW);
    writeShort(index);
    writeByte(type);
    switch (type) {
        case POOL_FIELD: {
            // Declaring class, name, type name and modifiers of a Java field.
            ResolvedJavaField field = (ResolvedJavaField) found[0];
            Objects.requireNonNull(field);
            writePoolObject(findFieldDeclaringClass(field));
            writePoolObject(findFieldName(field));
            writePoolObject(findFieldTypeName(field));
            writeInt(findFieldModifiers(field));
            break;
        }
        case POOL_SIGNATURE: {
            // Parameter type names (char-count-prefixed) then the return type name.
            Signature signature = (Signature) found[0];
            int args = findSignatureParameterCount(signature);
            writeShort((char) args);
            for (int i = 0; i < args; i++) {
                writePoolObject(findSignatureParameterTypeName(signature, i));
            }
            writePoolObject(findSignatureReturnTypeName(signature));
            break;
        }
        case POOL_NODE_SOURCE_POSITION: {
            NodeSourcePosition pos = (NodeSourcePosition) found[0];
            Objects.requireNonNull(pos);
            ResolvedJavaMethod method = findNodeSourcePositionMethod(pos);
            writePoolObject(method);
            final int bci = findNodeSourcePositionBCI(pos);
            writeInt(bci);
            Iterator<Location> ste = findLocation(method, bci, pos).iterator();
            if (versionMajor >= 6) {
                // Version 6+: null-terminated list of (uri, language, line, start, end);
                // locations without a URI or language are skipped.
                while (ste.hasNext()) {
                    Location loc = ste.next();
                    URI uri;
                    try {
                        uri = findLocationURI(loc);
                    } catch (URISyntaxException ex) {
                        throw new IOException(ex);
                    }
                    if (uri == null) {
                        continue;
                    }
                    String l = findLocationLanguage(loc);
                    if (l == null) {
                        continue;
                    }
                    writePoolObject(uri.toString());
                    writeString(l);
                    writeInt(findLocationLine(loc));
                    writeInt(findLocationStart(loc));
                    writeInt(findLocationEnd(loc));
                }
                writePoolObject(null);
            } else {
                // Older versions: at most one (fileName, line) pair, or null.
                Location first = ste.hasNext() ? ste.next() : null;
                String fileName = first != null ? findLocationFile(first) : null;
                if (fileName != null) {
                    writePoolObject(fileName);
                    writeInt(findLocationLine(first));
                } else {
                    writePoolObject(null);
                }
            }
            // Caller position chain, pool-encoded recursively.
            writePoolObject(findNodeSourcePositionCaller(pos));
            break;
        }
        case POOL_NODE: {
            Node node = (Node) found[0];
            Objects.requireNonNull(node);
            writeInt(findNodeId(node));
            writePoolObject(classForNode(node));
            break;
        }
        case POOL_NODE_CLASS: {
            NodeClass nodeClass = (NodeClass) found[0];
            final Object clazz = findJavaClass(nodeClass);
            if (versionMajor >= 3) {
                // Version 3+ pools the class itself instead of its simple name.
                writePoolObject(clazz);
                writeString(findNameTemplate(nodeClass));
            } else {
                writeString(((Class<?>) clazz).getSimpleName());
                String nameTemplate = findNameTemplate(nodeClass);
                writeString(nameTemplate);
            }
            writeEdgesInfo(nodeClass, true);
            writeEdgesInfo(nodeClass, false);
            break;
        }
        case POOL_CLASS: {
            String typeName = (String) found[0];
            Objects.requireNonNull(typeName);
            writeString(typeName);
            // Enum-like classes additionally carry their value names.
            String[] enumValueNames = findEnumTypeValues(object);
            if (enumValueNames != null) {
                writeByte(ENUM_KLASS);
                writeInt(enumValueNames.length);
                for (String o : enumValueNames) {
                    writePoolObject(o);
                }
            } else {
                writeByte(KLASS);
            }
            break;
        }
        case POOL_METHOD: {
            ResolvedJavaMethod method = (ResolvedJavaMethod) found[0];
            Objects.requireNonNull(method);
            writePoolObject(findMethodDeclaringClass(method));
            writePoolObject(findMethodName(method));
            final Signature methodSignature = findMethodSignature(method);
            // The signature must round-trip through the pool type detection,
            // otherwise the reader could not decode it.
            if (findSignature(methodSignature) == null) {
                throw new IOException("Should be recognized as signature: " + methodSignature + " for " + method);
            }
            writePoolObject(methodSignature);
            writeInt(findMethodModifiers(method));
            writeBytes(findMethodCode(method));
            break;
        }
        case POOL_ENUM: {
            // found[0] holds the ordinal computed by findPoolType.
            int enumOrdinal = (int) found[0];
            writePoolObject(findEnumClass(object));
            writeInt(enumOrdinal);
            break;
        }
        case POOL_STRING: {
            writeString(object.toString());
            break;
        }
        default:
            throw new IllegalStateException();
    }
}
/**
 * Writes a single property value with a type-tag byte: unboxed primitives,
 * int/double arrays, pooled objects, object arrays, or an inline sub-graph
 * when the value itself represents a graph.
 */
private void writePropertyObject(Graph graph, Object obj) throws IOException {
    if (obj instanceof Integer) {
        writeByte(PROPERTY_INT);
        writeInt(((Integer) obj).intValue());
    } else if (obj instanceof Long) {
        writeByte(PROPERTY_LONG);
        writeLong(((Long) obj).longValue());
    } else if (obj instanceof Double) {
        writeByte(PROPERTY_DOUBLE);
        writeDouble(((Double) obj).doubleValue());
    } else if (obj instanceof Float) {
        writeByte(PROPERTY_FLOAT);
        writeFloat(((Float) obj).floatValue());
    } else if (obj instanceof Boolean) {
        // Booleans are encoded purely in the tag byte.
        if (((Boolean) obj).booleanValue()) {
            writeByte(PROPERTY_TRUE);
        } else {
            writeByte(PROPERTY_FALSE);
        }
    } else if (obj != null && obj.getClass().isArray()) {
        Class<?> componentType = obj.getClass().getComponentType();
        if (componentType.isPrimitive()) {
            if (componentType == Double.TYPE) {
                writeByte(PROPERTY_ARRAY);
                writeByte(PROPERTY_DOUBLE);
                writeDoubles((double[]) obj);
            } else if (componentType == Integer.TYPE) {
                writeByte(PROPERTY_ARRAY);
                writeByte(PROPERTY_INT);
                writeInts((int[]) obj);
            } else {
                // Other primitive arrays have no dedicated encoding; pool them.
                writeByte(PROPERTY_POOL);
                writePoolObject(obj);
            }
        } else {
            writeByte(PROPERTY_ARRAY);
            writeByte(PROPERTY_POOL);
            Object[] array = (Object[]) obj;
            writeInt(array.length);
            for (Object o : array) {
                writePoolObject(o);
            }
        }
    } else {
        Graph g = findGraph(graph, obj);
        if (g == null) {
            writeByte(PROPERTY_POOL);
            writePoolObject(obj);
        } else {
            // The value is itself a graph: embed it recursively.
            writeByte(PROPERTY_SUBGRAPH);
            writeGraph(g, null);
        }
    }
}
/**
 * Writes a property map as a pair count followed by pooled keys and encoded
 * values. Counts of {@code Character.MAX_VALUE} or more are only expressible
 * since protocol version 8, via a sentinel char followed by a full int count.
 */
private void writeProperties(Graph graph, Map<? extends Object, ? extends Object> props) throws IOException {
    if (props == null) {
        writeShort((char) 0);
        return;
    }
    final int size = props.size();
    // properties
    if (size >= Character.MAX_VALUE) {
        if (versionMajor > 7) {
            // Sentinel: MAX_VALUE signals that the real count follows as an int.
            writeShort(Character.MAX_VALUE);
            writeInt(size);
        } else {
            throw new IllegalArgumentException("Property count is too big. Properties can contain only " + (Character.MAX_VALUE - 1) + " in version < 8.");
        }
    } else {
        writeShort((char) size);
    }
    int cnt = 0;
    for (Map.Entry<? extends Object, ? extends Object> entry : props.entrySet()) {
        String key = entry.getKey().toString();
        writePoolObject(key);
        writePropertyObject(graph, entry.getValue());
        cnt++;
    }
    // The count was written up front, so a concurrent modification of the map
    // would corrupt the stream.
    if (size != cnt) {
        throw new IOException("Expecting " + size + " properties, but found only " + cnt);
    }
}
/**
 * Returns whether {@code obj} is non-null; as a side effect stores it into
 * {@code found[0]} when a holder array is supplied.
 */
private static boolean isFound(Object obj, Object[] found) {
    if (obj != null) {
        if (found != null) {
            found[0] = obj;
        }
        return true;
    }
    return false;
}
/** Classes already reported by reportBadToString; lazily initialized there. */
private static Set<Class<?>> badToString;
/**
 * Reports an object whose POOL_STRING encoding is inconsistent: two objects
 * that are {@link Object#equals(Object)} produced different
 * {@link Object#toString()} representations. Since such a mismatch is
 * systematic per class, only the first occurrence for each class is printed
 * to avoid flooding the output.
 */
private static synchronized void reportBadToString(Object lookupKey, Object value) {
    if (badToString == null) {
        badToString = new LinkedHashSet<>();
    }
    final Class<?> offender = lookupKey.getClass();
    if (!badToString.add(offender)) {
        return; // already reported for this class
    }
    System.err.println("GraphProtocol: toString mismatch for " + offender + ": " + value + " != " + lookupKey.toString());
}
/**
 * Assertion helper: reports (once per class) when {@code value} differs from
 * {@code lookupKey.toString()}. Always returns {@code true} so it can be used
 * inside an {@code assert}.
 */
private static boolean checkToString(Object lookupKey, Object value) {
    if (lookupKey.toString().equals(value)) {
        return true;
    }
    reportBadToString(lookupKey, value);
    return true;
}
/**
 * This class maintains a limited pool of constants for use by the graph protocol. Once the
 * cache fills up the oldest slots are replaced with new values in a cyclic fashion.
 */
private static final class ConstantPool {
    // Next slot to (re)use; wraps around at CONSTANT_POOL_MAX_SIZE.
    private char nextId;
    /*
     * A mapping from an object to the pool entry that represents it. Normally the value is the
     * Character id of the entry but for {@link POOL_STRING} entries a second forwarding entry
     * might be created. A {@link POOL_STRING} can be looked up either by the original object or
     * by the toString representation of that object. To handle this case the original object is
     * inserted with the toString as the value. That string should then be looked up to get the
     * actual id. This is done to avoid excessive toString calls during encoding.
     */
    private final WeakHashMap<Object, Object> map = new WeakHashMap<>();
    // Strong references to the objects currently occupying pool slots, indexed by id.
    private final Object[] keys = new Object[CONSTANT_POOL_MAX_SIZE];
    ConstantPool() {
    }
    private static Object getLookupKey(Object key) {
        // Collections must be converted to a String early since they can be mutated after
        // being inserted into the map.
        return (key instanceof Collection) ? key.toString() : key;
    }
    /**
     * Returns the pool id for {@code initialKey}, or {@code null} if the key is not (or no
     * longer) pooled. For non-String POOL_STRING keys a forwarding entry may be installed.
     */
    Character get(Object initialKey, int type) {
        Object key = getLookupKey(initialKey);
        Object value = map.get(key);
        if (value instanceof String) {
            // Forwarding entry: the real id is stored under the toString representation.
            Character id = (Character) map.get(value);
            if (id != null && keys[id].equals(value)) {
                assert checkToString(key, value);
                return id;
            }
            value = null;
        }
        Character id = (Character) value;
        // Verify the slot still holds this key; it may have been cyclically evicted.
        if (id != null && keys[id].equals(key)) {
            return id;
        }
        if (type == POOL_STRING && !(key instanceof String)) {
            // See if the String representation is already in the map
            String string = key.toString();
            id = get(string, type);
            if (id != null) {
                // Add an entry that forwards from the object to the string.
                map.put(key, string);
                return id;
            }
        }
        return null;
    }
    /**
     * Assigns the next cyclic pool id to {@code initialKey}, evicting whatever previously
     * occupied that slot, and returns the new id.
     */
    char add(Object initialKey, int type) {
        char id = nextId++;
        if (nextId == CONSTANT_POOL_MAX_SIZE) {
            nextId = 0;
        }
        if (keys[id] != null) {
            // Evict the previous occupant of this slot from the lookup map.
            map.remove(keys[id]);
        }
        Object key = getLookupKey(initialKey);
        if (type == POOL_STRING && !(key instanceof String)) {
            // Insert a forwarding entry from the original object to the string representation
            // and then directly insert the string with the pool id.
            String string = key.toString();
            map.put(key, string);
            map.put(string, id);
            keys[id] = string;
        } else {
            map.put(key, id);
            keys[id] = key;
        }
        return id;
    }
    /** Clears all pool state so ids start from 0 again. */
    void reset() {
        map.clear();
        Arrays.fill(keys, null);
        nextId = 0;
    }
}
}
|
oracle/graalpython | 36,524 | graalpython/com.oracle.graal.python/src/com/oracle/graal/python/builtins/modules/cext/PythonCextSlotBuiltins.java | /*
* Copyright (c) 2021, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oracle.graal.python.builtins.modules.cext;
import static com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CApiCallPath.Ignored;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.ConstCharPtrAsTruffleString;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.Int;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.Pointer;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyASCIIObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyByteArrayObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyCFunctionObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyCMethodObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyDescrObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyFrameObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyGetSetDef;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyInstanceMethodObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyListObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyLongObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyMethodDef;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyMethodDescrObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyMethodObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyModuleDef;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyModuleObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyObjectBorrowed;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyObjectPtr;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyObjectWrapper;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PySetObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PySliceObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyTupleObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyTypeObjectBorrowed;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyUnicodeObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.PyVarObject;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.Py_ssize_t;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.UINTPTR_T;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.UNSIGNED_INT;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.Void;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.getter;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.setter;
import static com.oracle.graal.python.builtins.objects.cext.capi.transitions.ArgDescriptor.vectorcallfunc;
import static com.oracle.graal.python.nodes.HiddenAttr.METHOD_DEF_PTR;
import static com.oracle.graal.python.nodes.HiddenAttr.PROMOTED_START;
import static com.oracle.graal.python.nodes.HiddenAttr.PROMOTED_STEP;
import static com.oracle.graal.python.nodes.HiddenAttr.PROMOTED_STOP;
import static com.oracle.graal.python.nodes.SpecialAttributeNames.T___MODULE__;
import static com.oracle.graal.python.util.PythonUtils.TS_ENCODING;
import com.oracle.graal.python.builtins.PythonBuiltinClassType;
import com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CApiBinaryBuiltinNode;
import com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CApiBuiltin;
import com.oracle.graal.python.builtins.modules.cext.PythonCextBuiltins.CApiUnaryBuiltinNode;
import com.oracle.graal.python.builtins.objects.PNone;
import com.oracle.graal.python.builtins.objects.bytes.PByteArray;
import com.oracle.graal.python.builtins.objects.cext.capi.CApiContext;
import com.oracle.graal.python.builtins.objects.cext.capi.CExtNodes;
import com.oracle.graal.python.builtins.objects.cext.capi.CExtNodes.AsCharPointerNode;
import com.oracle.graal.python.builtins.objects.cext.capi.CExtNodes.ObSizeNode;
import com.oracle.graal.python.builtins.objects.cext.capi.PyMethodDefHelper;
import com.oracle.graal.python.builtins.objects.cext.capi.PySequenceArrayWrapper;
import com.oracle.graal.python.builtins.objects.cext.capi.PythonNativeWrapper.PythonAbstractObjectNativeWrapper;
import com.oracle.graal.python.builtins.objects.cext.structs.CStructAccess;
import com.oracle.graal.python.builtins.objects.common.HashingStorageNodes.HashingStorageLen;
import com.oracle.graal.python.builtins.objects.frame.PFrame;
import com.oracle.graal.python.builtins.objects.function.PBuiltinFunction;
import com.oracle.graal.python.builtins.objects.getsetdescriptor.GetSetDescriptor;
import com.oracle.graal.python.builtins.objects.ints.PInt;
import com.oracle.graal.python.builtins.objects.method.PBuiltinMethod;
import com.oracle.graal.python.builtins.objects.method.PDecoratedMethod;
import com.oracle.graal.python.builtins.objects.method.PMethod;
import com.oracle.graal.python.builtins.objects.module.PythonModule;
import com.oracle.graal.python.builtins.objects.object.PythonBuiltinObject;
import com.oracle.graal.python.builtins.objects.object.PythonObject;
import com.oracle.graal.python.builtins.objects.set.PBaseSet;
import com.oracle.graal.python.builtins.objects.slice.PSlice;
import com.oracle.graal.python.builtins.objects.str.NativeStringData;
import com.oracle.graal.python.builtins.objects.str.PString;
import com.oracle.graal.python.builtins.objects.str.StringNodes;
import com.oracle.graal.python.builtins.objects.str.StringNodes.StringLenNode;
import com.oracle.graal.python.builtins.objects.type.TypeNodes;
import com.oracle.graal.python.lib.PyObjectLookupAttr;
import com.oracle.graal.python.lib.PyObjectSetAttr;
import com.oracle.graal.python.nodes.HiddenAttr;
import com.oracle.graal.python.nodes.PGuards;
import com.oracle.graal.python.nodes.attributes.GetFixedAttributeNode;
import com.oracle.graal.python.nodes.object.GetClassNode;
import com.oracle.graal.python.runtime.PythonContext;
import com.oracle.graal.python.runtime.sequence.PSequence;
import com.oracle.graal.python.runtime.sequence.storage.NativeByteSequenceStorage;
import com.oracle.graal.python.runtime.sequence.storage.NativeObjectSequenceStorage;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.dsl.Bind;
import com.oracle.truffle.api.dsl.Cached;
import com.oracle.truffle.api.dsl.Cached.Exclusive;
import com.oracle.truffle.api.dsl.GenerateCached;
import com.oracle.truffle.api.dsl.GenerateInline;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.profiles.InlinedConditionProfile;
import com.oracle.truffle.api.strings.TruffleString;
public final class PythonCextSlotBuiltins {
/**
 * C-API field getter for {@code PyListObject.ob_item} and
 * {@code PyTupleObject.ob_item}: exposes the sequence's element array to
 * native code via a {@link PySequenceArrayWrapper}.
 */
@CApiBuiltin(name = "GraalPyPrivate_Get_PyListObject_ob_item", ret = PyObjectPtr, args = {PyListObject}, call = Ignored)
@CApiBuiltin(name = "GraalPyPrivate_Get_PyTupleObject_ob_item", ret = PyObjectPtr, args = {PyTupleObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PSequence_ob_item extends CApiUnaryBuiltinNode {
    @Specialization
    static Object get(PSequence object) {
        // Native byte storages are not expected here.
        assert !(object.getSequenceStorage() instanceof NativeByteSequenceStorage);
        return PySequenceArrayWrapper.ensureNativeSequence(object);
    }
}
/** C-API field getter for {@code PyASCIIObject.length} (the string length). */
@CApiBuiltin(ret = Py_ssize_t, args = {PyASCIIObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyASCIIObject_length extends CApiUnaryBuiltinNode {
    @Specialization
    static long get(Object object,
                    @Cached StringLenNode stringLenNode) {
        return stringLenNode.execute(object);
    }
}
/**
 * C-API field getter for {@code PyASCIIObject.state.ascii}: 1 if the string
 * is pure ASCII, 0 otherwise. For strings backed by native data the flag is
 * read directly; otherwise it is derived from the TruffleString code range.
 */
@CApiBuiltin(ret = UNSIGNED_INT, args = {PyASCIIObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyASCIIObject_state_ascii extends CApiUnaryBuiltinNode {
    // Made static for consistency with the sibling state_* specializations;
    // the node carries no instance state used here.
    @Specialization
    static int get(PString object,
                    @Bind Node inliningTarget,
                    @Cached InlinedConditionProfile storageProfile,
                    @Cached HiddenAttr.ReadNode readAttrNode,
                    @Cached TruffleString.GetCodeRangeNode getCodeRangeNode) {
        // important: avoid materialization of native sequences
        NativeStringData nativeData = object.getNativeStringData(inliningTarget, readAttrNode);
        if (storageProfile.profile(inliningTarget, nativeData != null)) {
            return nativeData.isAscii() ? 1 : 0;
        }
        TruffleString string = object.getMaterialized();
        return PInt.intValue(getCodeRangeNode.execute(string, TS_ENCODING) == TruffleString.CodeRange.ASCII);
    }
}
/**
 * C-API field getter for {@code PyASCIIObject.state.compact}: this
 * implementation always reports 0 (non-compact).
 */
@CApiBuiltin(ret = UNSIGNED_INT, args = {PyASCIIObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyASCIIObject_state_compact extends CApiUnaryBuiltinNode {
    @Specialization
    static int get(@SuppressWarnings("unused") Object object) {
        return 0;
    }
}
/**
 * C-API field getter for {@code PyASCIIObject.state.interned}: 1 if the
 * string is interned, 0 otherwise.
 */
@CApiBuiltin(ret = UNSIGNED_INT, args = {PyASCIIObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyASCIIObject_state_interned extends CApiUnaryBuiltinNode {
    @Specialization
    static int get(PString object,
                    @Bind Node inliningTarget,
                    @Cached StringNodes.IsInternedStringNode isInternedStringNode) {
        return isInternedStringNode.execute(inliningTarget, object) ? 1 : 0;
    }
}
/**
 * C-API field getter for {@code PyASCIIObject.state.kind}: the per-character
 * byte width (1, 2 or 4). For native-backed strings the stored char size is
 * returned; otherwise it is derived from the TruffleString code range.
 */
@CApiBuiltin(ret = UNSIGNED_INT, args = {PyASCIIObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyASCIIObject_state_kind extends CApiUnaryBuiltinNode {
    @Specialization
    static int get(PString object,
                    @Bind Node inliningTarget,
                    @Cached InlinedConditionProfile storageProfile,
                    @Cached HiddenAttr.ReadNode readAttrNode,
                    @Cached TruffleString.GetCodeRangeNode getCodeRangeNode) {
        // important: avoid materialization of native sequences
        NativeStringData nativeData = object.getNativeStringData(inliningTarget, readAttrNode);
        if (storageProfile.profile(inliningTarget, nativeData != null)) {
            return nativeData.getCharSize();
        }
        TruffleString string = object.getMaterialized();
        TruffleString.CodeRange range = getCodeRangeNode.execute(string, TS_ENCODING);
        if (range.isSubsetOf(TruffleString.CodeRange.LATIN_1)) {
            return 1;
        } else if (range.isSubsetOf(TruffleString.CodeRange.BMP)) {
            return 2;
        } else {
            return 4;
        }
    }
}
/**
 * C-API field getter for {@code PyASCIIObject.state.ready}: always 1 in this
 * implementation.
 */
@CApiBuiltin(ret = UNSIGNED_INT, args = {PyASCIIObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyASCIIObject_state_ready extends CApiUnaryBuiltinNode {
    @Specialization
    static int get(@SuppressWarnings("unused") Object object) {
        return 1;
    }
}
/**
 * C-API field getter for {@code PyCMethodObject.mm_class}: the class object
 * associated with the builtin method.
 */
@CApiBuiltin(ret = PyTypeObjectBorrowed, args = {PyCMethodObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyCMethodObject_mm_class extends CApiUnaryBuiltinNode {
    @Specialization
    static Object get(PBuiltinMethod object) {
        return object.getClassObject();
    }
}
/**
 * C-API field getter for {@code PyCFunctionObject.m_ml}: returns the
 * PyMethodDef pointer previously stored under {@code METHOD_DEF_PTR}, or
 * creates a fresh PyMethodDef for the underlying builtin function.
 */
@CApiBuiltin(ret = PyMethodDef, args = {PyCFunctionObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyCFunctionObject_m_ml extends CApiUnaryBuiltinNode {
    @Specialization
    static Object get(PythonBuiltinObject object,
                    @Bind Node inliningTarget,
                    @Cached HiddenAttr.ReadNode readNode) {
        // Resolve a method to its underlying builtin function.
        PBuiltinFunction resolved;
        if (object instanceof PBuiltinMethod builtinMethod) {
            resolved = builtinMethod.getBuiltinFunction();
        } else if (object instanceof PBuiltinFunction builtinFunction) {
            resolved = builtinFunction;
        } else {
            CompilerDirectives.transferToInterpreterAndInvalidate();
            throw CompilerDirectives.shouldNotReachHere("requesting PyMethodDef for an incompatible function/method type: " + object.getClass().getSimpleName());
        }
        // Prefer a previously stored pointer to keep pointer identity stable.
        Object methodDefPtr = readNode.execute(inliningTarget, resolved, METHOD_DEF_PTR, null);
        if (methodDefPtr != null) {
            return methodDefPtr;
        }
        CApiContext cApiContext = getCApiContext(inliningTarget);
        return PyMethodDefHelper.create(cApiContext, resolved);
    }
}
/**
 * C-API field setter for {@code PyCFunctionObject.m_ml}: stores the native
 * PyMethodDef pointer on the underlying builtin function so the getter above
 * can return it later.
 */
@CApiBuiltin(ret = PyMethodDef, args = {PyCFunctionObject, PyMethodDef}, call = Ignored)
abstract static class GraalPyPrivate_Set_PyCFunctionObject_m_ml extends CApiBinaryBuiltinNode {
    // Renamed from 'get' to 'set' for consistency with the other setter
    // builtins in this file; this builtin writes, it does not read.
    @Specialization
    static Object set(PythonBuiltinObject object, Object methodDefPtr,
                    @Bind Node inliningTarget,
                    @Cached HiddenAttr.WriteNode writeNode) {
        // Resolve a method to its underlying builtin function.
        PBuiltinFunction resolved;
        if (object instanceof PBuiltinMethod builtinMethod) {
            resolved = builtinMethod.getBuiltinFunction();
        } else if (object instanceof PBuiltinFunction builtinFunction) {
            resolved = builtinFunction;
        } else {
            CompilerDirectives.transferToInterpreterAndInvalidate();
            throw CompilerDirectives.shouldNotReachHere("writing PyMethodDef for an incompatible function/method type: " + object.getClass().getSimpleName());
        }
        writeNode.execute(inliningTarget, resolved, METHOD_DEF_PTR, methodDefPtr);
        return PNone.NO_VALUE;
    }
}
/**
 * C-API field getter for {@code PyCFunctionObject.m_module}: looks up the
 * {@code __module__} attribute, returning the native NULL when absent.
 */
@CApiBuiltin(ret = PyObjectBorrowed, args = {PyCFunctionObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyCFunctionObject_m_module extends CApiUnaryBuiltinNode {
    @Specialization
    Object get(Object object,
                    @Bind Node inliningTarget,
                    @Cached PyObjectLookupAttr lookup) {
        Object module = lookup.execute(null, inliningTarget, object, T___MODULE__);
        return module != PNone.NO_VALUE ? module : getNativeNull();
    }
}
/**
 * C-API field setter for {@code PyCFunctionObject.m_module}: stores the value
 * as the {@code __module__} attribute.
 */
@CApiBuiltin(ret = Void, args = {PyCFunctionObject, PyObjectBorrowed}, call = Ignored)
abstract static class GraalPyPrivate_Set_PyCFunctionObject_m_module extends CApiBinaryBuiltinNode {
    @Specialization
    Object set(Object object, Object value,
                    @Bind Node inliningTarget,
                    @Cached PyObjectSetAttr setattr) {
        setattr.execute(null, inliningTarget, object, T___MODULE__, value);
        return PNone.NO_VALUE;
    }
}
/**
 * C-API field getter for {@code PyCFunctionObject.m_self}: the object the
 * method is bound to.
 */
@CApiBuiltin(ret = PyObjectBorrowed, args = {PyCFunctionObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyCFunctionObject_m_self extends CApiUnaryBuiltinNode {
    @Specialization
    static Object get(PBuiltinMethod object) {
        return object.getSelf();
    }
    @Specialization
    static Object get(PMethod object) {
        return object.getSelf();
    }
}
/**
 * C-API field getter for {@code PyCFunctionObject.m_weakreflist}: must never
 * be invoked in this implementation.
 */
@CApiBuiltin(ret = PyObjectBorrowed, args = {PyCFunctionObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyCFunctionObject_m_weakreflist extends CApiUnaryBuiltinNode {
    @Specialization
    static int get(@SuppressWarnings("unused") Object object) {
        throw CompilerDirectives.shouldNotReachHere();
    }
}
/**
 * C-API field getter for {@code PyCFunctionObject.vectorcall}: must never be
 * invoked in this implementation.
 */
@CApiBuiltin(ret = vectorcallfunc, args = {PyCFunctionObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyCFunctionObject_vectorcall extends CApiUnaryBuiltinNode {
    @Specialization
    static int get(@SuppressWarnings("unused") Object object) {
        throw CompilerDirectives.shouldNotReachHere();
    }
}
/**
 * C-API field getter for {@code PyByteArrayObject.ob_start}: exposes the
 * bytearray's storage to native code via a {@link PySequenceArrayWrapper}.
 */
@CApiBuiltin(ret = Pointer, args = {PyByteArrayObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyByteArrayObject_ob_start extends CApiUnaryBuiltinNode {
    @Specialization
    static Object doObStart(PByteArray object) {
        // Native object storages are not expected here.
        assert !(object.getSequenceStorage() instanceof NativeObjectSequenceStorage);
        return PySequenceArrayWrapper.ensureNativeSequence(object);
    }
}
/**
 * C-API field getter for {@code PyByteArrayObject.ob_exports}: the bytearray's
 * export counter.
 */
@CApiBuiltin(ret = Py_ssize_t, args = {PyByteArrayObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyByteArrayObject_ob_exports extends CApiUnaryBuiltinNode {
    @Specialization
    static long get(PByteArray object) {
        return object.getExports();
    }
}
/**
 * C-API field setter for {@code PyByteArrayObject.ob_exports}: updates the
 * bytearray's export counter.
 */
@CApiBuiltin(ret = Void, args = {PyByteArrayObject, Int}, call = Ignored)
abstract static class GraalPyPrivate_Set_PyByteArrayObject_ob_exports extends CApiBinaryBuiltinNode {
    @Specialization
    static Object set(PByteArray object, int value) {
        object.setExports(value);
        return PNone.NO_VALUE;
    }
}
/**
 * C-API field getter for {@code PyDescrObject.d_name}: the descriptor's
 * C-API name.
 */
@CApiBuiltin(ret = PyObjectBorrowed, args = {PyDescrObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyDescrObject_d_name extends CApiUnaryBuiltinNode {
    @Specialization
    static Object get(PBuiltinFunction object) {
        return object.getCApiName();
    }
    @Specialization
    static Object get(GetSetDescriptor object) {
        return object.getCApiName();
    }
}
/**
 * C-API field getter for {@code PyDescrObject.d_type}: the type the
 * descriptor belongs to, or the native NULL when a builtin function has no
 * enclosing type.
 */
@CApiBuiltin(ret = PyTypeObjectBorrowed, args = {PyDescrObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyDescrObject_d_type extends CApiUnaryBuiltinNode {
    @Specialization
    Object get(PBuiltinFunction object) {
        Object enclosingType = object.getEnclosingType();
        return enclosingType != null ? enclosingType : getNativeNull();
    }
    @Specialization
    static Object get(GetSetDescriptor object) {
        return object.getType();
    }
}
/** C-API field getter for {@code PyFrameObject.f_lineno}: the frame's line. */
@CApiBuiltin(ret = Int, args = {PyFrameObject}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyFrameObject_f_lineno extends CApiUnaryBuiltinNode {
    @Specialization
    static int get(PFrame frame) {
        return frame.getLine();
    }
}
@CApiBuiltin(ret = Pointer, args = {PyGetSetDef}, call = Ignored)
abstract static class GraalPyPrivate_Get_PyGetSetDef_closure extends CApiUnaryBuiltinNode {
@Specialization
static int get(@SuppressWarnings("unused") Object object) {
throw CompilerDirectives.shouldNotReachHere();
}
}
    // Accessor for 'doc' of PyGetSetDef: reads the object's __doc__ attribute
    // and converts it to a C string; returns native NULL when __doc__ is None.
    @CApiBuiltin(ret = ConstCharPtrAsTruffleString, args = {PyGetSetDef}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyGetSetDef_doc extends CApiUnaryBuiltinNode {
        @Specialization
        Object get(PythonObject object,
                        @Cached(parameters = "T___DOC__") GetFixedAttributeNode getAttrNode,
                        @Cached AsCharPointerNode asCharPointerNode) {
            Object doc = getAttrNode.execute(null, object);
            if (PGuards.isPNone(doc)) {
                return getNULL();
            } else {
                return asCharPointerNode.execute(doc);
            }
        }
    }
    // Accessor for 'get' of PyGetSetDef: unconditionally aborts — the function
    // pointer is not expected to be read through this managed accessor.
    @CApiBuiltin(ret = getter, args = {PyGetSetDef}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyGetSetDef_get extends CApiUnaryBuiltinNode {
        @Specialization
        static int get(@SuppressWarnings("unused") Object object) {
            throw CompilerDirectives.shouldNotReachHere();
        }
    }
    // Accessor for 'name' of PyGetSetDef: reads the object's __name__ attribute
    // and converts it to a C string; returns native NULL when __name__ is None.
    @CApiBuiltin(ret = ConstCharPtrAsTruffleString, args = {PyGetSetDef}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyGetSetDef_name extends CApiUnaryBuiltinNode {
        @Specialization
        Object get(PythonObject object,
                        @Cached(parameters = "T___NAME__") GetFixedAttributeNode getAttrNode,
                        @Cached AsCharPointerNode asCharPointerNode) {
            Object name = getAttrNode.execute(null, object);
            if (PGuards.isPNone(name)) {
                return getNULL();
            } else {
                return asCharPointerNode.execute(name);
            }
        }
    }
    // Accessor for 'set' of PyGetSetDef: unconditionally aborts — the function
    // pointer is not expected to be read through this managed accessor.
    @CApiBuiltin(ret = setter, args = {PyGetSetDef}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyGetSetDef_set extends CApiUnaryBuiltinNode {
        @Specialization
        static int get(@SuppressWarnings("unused") Object object) {
            throw CompilerDirectives.shouldNotReachHere();
        }
    }
    // Accessor for 'd_method' of PyMethodDescrObject: returns the native
    // PyMethodDef pointer for the builtin function, creating and caching it on
    // first access via a hidden attribute.
    @CApiBuiltin(ret = PyMethodDef, args = {PyMethodDescrObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyMethodDescrObject_d_method extends CApiUnaryBuiltinNode {
        @Specialization
        static Object get(PBuiltinFunction builtinFunction,
                        @Bind Node inliningTarget,
                        @Cached HiddenAttr.ReadNode readNode) {
            // Fast path: a PyMethodDef was already materialized and cached.
            Object methodDefPtr = readNode.execute(inliningTarget, builtinFunction, METHOD_DEF_PTR, null);
            if (methodDefPtr != null) {
                return methodDefPtr;
            }
            /*
             * Note: 'PBuiltinFunction' is the only Java class we use to represent a
             * 'method_descriptor' (CPython type 'PyMethodDescr_Type').
             */
            return PyMethodDefHelper.create(getCApiContext(inliningTarget), builtinFunction);
        }
    }
    // Accessor for 'func' of PyInstanceMethodObject: the wrapped callable.
    @CApiBuiltin(ret = PyObjectBorrowed, args = {PyInstanceMethodObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyInstanceMethodObject_func extends CApiUnaryBuiltinNode {
        @Specialization
        static Object get(PDecoratedMethod object) {
            return object.getCallable();
        }
    }
    // Accessor for 'im_func' of PyMethodObject: the underlying function of a
    // bound (builtin or regular) method.
    @CApiBuiltin(ret = PyObjectBorrowed, args = {PyMethodObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyMethodObject_im_func extends CApiUnaryBuiltinNode {
        @Specialization
        static Object get(PBuiltinMethod object) {
            return object.getFunction();
        }
        @Specialization
        static Object get(PMethod object) {
            return object.getFunction();
        }
    }
    // Accessor for 'im_self' of PyMethodObject: the object the method is bound to.
    @CApiBuiltin(ret = PyObjectBorrowed, args = {PyMethodObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyMethodObject_im_self extends CApiUnaryBuiltinNode {
        @Specialization
        static Object get(PBuiltinMethod object) {
            return object.getSelf();
        }
        @Specialization
        static Object get(PMethod object) {
            return object.getSelf();
        }
    }
    // The four PyModuleDef field accessors below unconditionally abort: these
    // fields are never expected to be read through the managed accessor path
    // (presumably the raw native struct is consulted directly). The stubs exist
    // only to complete the generated field-accessor table.
    @CApiBuiltin(ret = ConstCharPtrAsTruffleString, args = {PyModuleDef}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyModuleDef_m_doc extends CApiUnaryBuiltinNode {
        @Specialization
        static int get(@SuppressWarnings("unused") Object object) {
            throw CompilerDirectives.shouldNotReachHere();
        }
    }
    @CApiBuiltin(ret = PyMethodDef, args = {PyModuleDef}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyModuleDef_m_methods extends CApiUnaryBuiltinNode {
        @Specialization
        static int get(@SuppressWarnings("unused") Object object) {
            throw CompilerDirectives.shouldNotReachHere();
        }
    }
    @CApiBuiltin(ret = ConstCharPtrAsTruffleString, args = {PyModuleDef}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyModuleDef_m_name extends CApiUnaryBuiltinNode {
        @Specialization
        static int get(@SuppressWarnings("unused") Object object) {
            throw CompilerDirectives.shouldNotReachHere();
        }
    }
    @CApiBuiltin(ret = Py_ssize_t, args = {PyModuleDef}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyModuleDef_m_size extends CApiUnaryBuiltinNode {
        @Specialization
        static int get(@SuppressWarnings("unused") Object object) {
            throw CompilerDirectives.shouldNotReachHere();
        }
    }
    // Accessor for 'md_def' of PyModuleObject: the native module definition
    // stored on the managed module object.
    @CApiBuiltin(ret = PyModuleDef, args = {PyModuleObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyModuleObject_md_def extends CApiUnaryBuiltinNode {
        @Specialization
        static Object get(PythonModule object) {
            return object.getNativeModuleDef();
        }
    }
    // Accessor for 'md_dict' of PyModuleObject: resolved via the __dict__
    // attribute of the module.
    @CApiBuiltin(ret = PyObjectBorrowed, args = {PyModuleObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyModuleObject_md_dict extends CApiUnaryBuiltinNode {
        @Specialization
        static Object get(Object object,
                        @Exclusive @Cached(parameters = "T___DICT__") GetFixedAttributeNode getDictNode) {
            return getDictNode.execute(null, object);
        }
    }
    // Accessor for 'md_state' of PyModuleObject: the native module state
    // pointer, or the native NULL value if none was set.
    @CApiBuiltin(ret = Pointer, args = {PyModuleObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyModuleObject_md_state extends CApiUnaryBuiltinNode {
        @Specialization
        static Object get(PythonModule object,
                        @Bind Node inliningTarget) {
            return object.getNativeModuleState() != null ? object.getNativeModuleState() : PythonContext.get(inliningTarget).getNativeNull();
        }
    }
    // Accessor for 'ob_refcnt' of PyObject: only reachable in C-API debug mode;
    // in normal operation reference counting happens exclusively in native code.
    @CApiBuiltin(ret = Py_ssize_t, args = {PyObjectWrapper}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyObject_ob_refcnt extends CApiUnaryBuiltinNode {
        @Specialization
        static Object get(PythonAbstractObjectNativeWrapper wrapper) {
            /*
             * We are allocating native object stubs for each wrapper. Therefore, reference counting
             * should only be done on the native side. However, we allow access for debugging
             * purposes.
             */
            if (PythonContext.DEBUG_CAPI) {
                return wrapper.getRefCount();
            }
            throw CompilerDirectives.shouldNotReachHere();
        }
    }
    // Accessor for 'ob_type' of PyObject: computes the Python class of the
    // object. Only used for debugging or when native access is disallowed.
    @CApiBuiltin(ret = PyTypeObjectBorrowed, args = {PyObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyObject_ob_type extends CApiUnaryBuiltinNode {
        @Specialization
        static Object get(Object object,
                        @Bind Node inliningTarget) {
            /*
             * We are allocating native object stubs for each wrapper. Therefore, accesses to
             * 'ob_type' should only be done on the native side. However, we allow access for
             * debugging purposes and in managed mode.
             */
            assert PythonContext.DEBUG_CAPI || !PythonContext.get(inliningTarget).isNativeAccessAllowed();
            // Uncached lookup is acceptable here since this is a slow debug path.
            Object result = GetClassNode.executeUncached(object);
            assert !(result instanceof Integer);
            return result;
        }
    }
    // Accessor for 'used' of PySetObject: the number of entries in the set,
    // computed from the length of the underlying hashing storage.
    @CApiBuiltin(ret = Py_ssize_t, args = {PySetObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PySetObject_used extends CApiUnaryBuiltinNode {
        @Specialization
        static long get(PBaseSet object,
                        @Bind Node inliningTarget,
                        @Cached HashingStorageLen lenNode) {
            return lenNode.execute(inliningTarget, object.getDictStorage());
        }
    }
    // Helper for the PySliceObject field accessors below. Since the C API hands
    // out borrowed references, the raw slice field value is "promoted" (e.g. to
    // an object with identity that can be safely borrowed) and cached on the
    // slice under a hidden attribute so repeated reads return the same object.
    @GenerateInline
    @GenerateCached(false)
    abstract static class GetSliceField extends Node {
        abstract Object execute(Node inliningTarget, PSlice object, HiddenAttr key, Object value);
        @Specialization
        static Object get(Node inliningTarget, PSlice object, HiddenAttr key, Object value,
                        @Cached HiddenAttr.ReadNode read,
                        @Cached HiddenAttr.WriteNode write,
                        @Cached PythonCextBuiltins.PromoteBorrowedValue promote) {
            Object promotedValue = read.execute(inliningTarget, object, key, null);
            if (promotedValue == null) {
                promotedValue = promote.execute(inliningTarget, value);
                if (promotedValue == null) {
                    // Value needs no promotion; return it as-is (not cached).
                    return value;
                }
                // Cache the promoted value for subsequent borrowed reads.
                write.execute(inliningTarget, object, key, promotedValue);
            }
            return promotedValue;
        }
    }
    // Accessor for 'start' of PySliceObject (promoted/cached, see GetSliceField).
    @CApiBuiltin(ret = PyObjectBorrowed, args = {PySliceObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PySliceObject_start extends CApiUnaryBuiltinNode {
        @Specialization
        static Object doStart(PSlice object,
                        @Bind Node inliningTarget,
                        @Cached GetSliceField getSliceField) {
            return getSliceField.execute(inliningTarget, object, PROMOTED_START, object.getStart());
        }
    }
    // Accessor for 'step' of PySliceObject (promoted/cached, see GetSliceField).
    @CApiBuiltin(ret = PyObjectBorrowed, args = {PySliceObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PySliceObject_step extends CApiUnaryBuiltinNode {
        @Specialization
        static Object doStep(PSlice object,
                        @Bind Node inliningTarget,
                        @Cached GetSliceField getSliceField) {
            return getSliceField.execute(inliningTarget, object, PROMOTED_STEP, object.getStep());
        }
    }
    // Accessor for 'stop' of PySliceObject (promoted/cached, see GetSliceField).
    @CApiBuiltin(ret = PyObjectBorrowed, args = {PySliceObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PySliceObject_stop extends CApiUnaryBuiltinNode {
        @Specialization
        static Object doStop(PSlice object,
                        @Bind Node inliningTarget,
                        @Cached GetSliceField getSliceField) {
            return getSliceField.execute(inliningTarget, object, PROMOTED_STOP, object.getStop());
        }
    }
    // Accessor for the character data of PyUnicodeObject. Materializes the
    // string into a freshly allocated native buffer using the narrowest
    // encoding that can represent it (1-, 2-, or 4-byte units, mirroring
    // CPython's compact unicode representations), and caches the result so
    // subsequent reads return the same pointer.
    @CApiBuiltin(ret = Pointer, args = {PyUnicodeObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyUnicodeObject_data extends CApiUnaryBuiltinNode {
        @Specialization
        static Object get(PString object,
                        @Bind Node inliningTarget,
                        @Cached TruffleString.GetCodeRangeNode getCodeRangeNode,
                        @Cached TruffleString.SwitchEncodingNode switchEncodingNode,
                        @Cached CStructAccess.AllocateNode allocateNode,
                        @Cached CStructAccess.WriteTruffleStringNode writeTruffleStringNode,
                        @Cached HiddenAttr.ReadNode readAttrNode,
                        @Cached HiddenAttr.WriteNode writeAttrNode) {
            NativeStringData nativeData = object.getNativeStringData(inliningTarget, readAttrNode);
            if (nativeData != null) {
                // in this case, we can just return the pointer
                return nativeData.getPtr();
            }
            TruffleString string = object.getMaterialized();
            // Pick the smallest element size that covers the string's code range.
            TruffleString.CodeRange range = getCodeRangeNode.execute(string, TS_ENCODING);
            TruffleString.Encoding encoding;
            int charSize;
            boolean isAscii = false;
            if (range == TruffleString.CodeRange.ASCII) {
                isAscii = true;
                charSize = 1;
                encoding = TruffleString.Encoding.US_ASCII;
            } else if (range.isSubsetOf(TruffleString.CodeRange.LATIN_1)) {
                charSize = 1;
                encoding = TruffleString.Encoding.ISO_8859_1;
            } else if (range.isSubsetOf(TruffleString.CodeRange.BMP)) {
                charSize = 2;
                encoding = TruffleString.Encoding.UTF_16;
            } else {
                charSize = 4;
                encoding = TruffleString.Encoding.UTF_32;
            }
            string = switchEncodingNode.execute(string, encoding);
            int byteLength = string.byteLength(encoding);
            // One extra element for the NUL terminator expected by native code.
            Object ptr = allocateNode.alloc(byteLength + /* null terminator */ charSize);
            writeTruffleStringNode.write(ptr, string, encoding);
            /*
             * Set native data, so we can just return the pointer the next time.
             */
            NativeStringData data = NativeStringData.create(charSize, isAscii, ptr, byteLength);
            object.setNativeStringData(inliningTarget, writeAttrNode, data);
            return ptr;
        }
    }
    // Accessor for 'ob_size' of PyVarObject. Ints and bools are excluded by
    // assertion: their variable size is handled via the long-value tag accessor
    // below, not through the generic ob_size path.
    @CApiBuiltin(ret = Py_ssize_t, args = {PyVarObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyVarObject_ob_size extends CApiUnaryBuiltinNode {
        @Specialization
        static long get(Object object,
                        @Bind Node inliningTarget,
                        @Cached GetClassNode getClassNode,
                        @Cached TypeNodes.IsSameTypeNode isSameTypeNode,
                        @Cached ObSizeNode obSizeNode) {
            assert !isSameTypeNode.execute(inliningTarget, getClassNode.execute(inliningTarget, object), PythonBuiltinClassType.PInt);
            assert !isSameTypeNode.execute(inliningTarget, getClassNode.execute(inliningTarget, object), PythonBuiltinClassType.Boolean);
            return obSizeNode.execute(inliningTarget, object);
        }
    }
    // Accessor for 'long_value.lv_tag' of PyLongObject (CPython 3.12+ int
    // layout); the tag computation is delegated to LvTagNode.
    @CApiBuiltin(ret = UINTPTR_T, args = {PyLongObject}, call = Ignored)
    abstract static class GraalPyPrivate_Get_PyLongObject_long_value_lv_tag extends CApiUnaryBuiltinNode {
        @Specialization
        static long getLvTag(Object n,
                        @Bind Node inliningTarget,
                        @Cached CExtNodes.LvTagNode lvTagNode) {
            return lvTagNode.execute(inliningTarget, n);
        }
    }
}
|
apache/giraph | 36,142 | giraph-core/src/main/java/org/apache/giraph/utils/WritableUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.utils;
import static org.apache.hadoop.util.ReflectionUtils.newInstance;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import org.apache.giraph.conf.ImmutableClassesGiraphConfiguration;
import org.apache.giraph.edge.Edge;
import org.apache.giraph.edge.OutEdges;
import org.apache.giraph.factories.ValueFactory;
import org.apache.giraph.graph.Vertex;
import org.apache.giraph.zk.ZooKeeperExt;
import org.apache.giraph.zk.ZooKeeperExt.PathStat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.data.Stat;
/**
* Helper static methods for working with Writable objects.
*/
public class WritableUtils {
  /**
   * Utility class; not instantiable.
   */
  private WritableUtils() { }
  /**
   * Instantiate a new Writable, checking for NullWritable along the way.
   *
   * <p>Delegates to
   * {@link #createWritable(Class, ImmutableClassesGiraphConfiguration)}
   * with a null configuration.
   *
   * @param klass Class
   * @param <W> type
   * @return new instance of class
   */
  public static <W extends Writable> W createWritable(Class<W> klass) {
    return createWritable(klass, null);
  }
/**
* Instantiate a new Writable, checking for NullWritable along the way.
*
* @param klass Class
* @param configuration Configuration
* @param <W> type
* @return new instance of class
*/
public static <W extends Writable> W createWritable(
Class<W> klass,
ImmutableClassesGiraphConfiguration configuration) {
W result;
if (NullWritable.class.equals(klass)) {
result = (W) NullWritable.get();
} else {
result = ReflectionUtils.newInstance(klass);
}
ConfigurationUtils.configureIfPossible(result, configuration);
return result;
}
/**
* Read fields from byteArray to a Writeable object.
*
* @param byteArray Byte array to find the fields in.
* @param writableObjects Objects to fill in the fields.
*/
public static void readFieldsFromByteArray(
byte[] byteArray, Writable... writableObjects) {
DataInputStream inputStream =
new DataInputStream(new ByteArrayInputStream(byteArray));
try {
for (Writable writableObject : writableObjects) {
writableObject.readFields(inputStream);
}
} catch (IOException e) {
throw new IllegalStateException(
"readFieldsFromByteArray: IOException", e);
}
}
/**
* Read fields from a ZooKeeper znode.
*
* @param zkExt ZooKeeper instance.
* @param zkPath Path of znode.
* @param watch Add a watch?
* @param stat Stat of znode if desired.
* @param writableObjects Objects to read into.
*/
public static void readFieldsFromZnode(ZooKeeperExt zkExt,
String zkPath,
boolean watch,
Stat stat,
Writable... writableObjects) {
try {
byte[] zkData = zkExt.getData(zkPath, false, stat);
readFieldsFromByteArray(zkData, writableObjects);
} catch (KeeperException e) {
throw new IllegalStateException(
"readFieldsFromZnode: KeeperException on " + zkPath, e);
} catch (InterruptedException e) {
throw new IllegalStateException(
"readFieldsFromZnode: InterrruptedStateException on " + zkPath, e);
}
}
/**
* Write object to a byte array.
*
* @param writableObjects Objects to write from.
* @return Byte array with serialized object.
*/
public static byte[] writeToByteArray(Writable... writableObjects) {
ByteArrayOutputStream outputStream =
new ByteArrayOutputStream();
DataOutput output = new DataOutputStream(outputStream);
try {
for (Writable writableObject : writableObjects) {
writableObject.write(output);
}
} catch (IOException e) {
throw new IllegalStateException(
"writeToByteArray: IOStateException", e);
}
return outputStream.toByteArray();
}
/**
* Read fields from byteArray to a Writeable object, skipping the size.
* Serialization method is choosable
*
* @param byteArray Byte array to find the fields in.
* @param writableObject Object to fill in the fields.
* @param unsafe Use unsafe deserialization
*/
public static void readFieldsFromByteArrayWithSize(
byte[] byteArray, Writable writableObject, boolean unsafe) {
ExtendedDataInput extendedDataInput;
if (unsafe) {
extendedDataInput = new UnsafeByteArrayInputStream(byteArray);
} else {
extendedDataInput = new ExtendedByteArrayDataInput(byteArray);
}
try {
extendedDataInput.readInt();
writableObject.readFields(extendedDataInput);
} catch (IOException e) {
throw new IllegalStateException(
"readFieldsFromByteArrayWithSize: IOException", e);
}
}
  /**
   * Write object to a byte array with the first 4 bytes as the size of the
   * entire buffer (including the size).
   *
   * <p>Delegates to {@link #writeToByteArrayWithSize(Writable, byte[],
   * boolean)} without a reusable buffer.
   *
   * @param writableObject Object to write from.
   * @param unsafe Use unsafe serialization?
   * @return Byte array with serialized object.
   */
  public static byte[] writeToByteArrayWithSize(Writable writableObject,
                                                boolean unsafe) {
    return writeToByteArrayWithSize(writableObject, null, unsafe);
  }
/**
* Write object to a byte array with the first 4 bytes as the size of the
* entire buffer (including the size).
*
* @param writableObject Object to write from.
* @param buffer Use this buffer instead
* @param unsafe Use unsafe serialization?
* @return Byte array with serialized object.
*/
public static byte[] writeToByteArrayWithSize(Writable writableObject,
byte[] buffer,
boolean unsafe) {
ExtendedDataOutput extendedDataOutput;
if (unsafe) {
extendedDataOutput = new UnsafeByteArrayOutputStream(buffer);
} else {
extendedDataOutput = new ExtendedByteArrayDataOutput(buffer);
}
try {
extendedDataOutput.writeInt(-1);
writableObject.write(extendedDataOutput);
extendedDataOutput.writeInt(0, extendedDataOutput.getPos());
} catch (IOException e) {
throw new IllegalStateException("writeToByteArrayWithSize: " +
"IOException", e);
}
return extendedDataOutput.getByteArray();
}
/**
* Write object to a ZooKeeper znode.
*
* @param zkExt ZooKeeper instance.
* @param zkPath Path of znode.
* @param version Version of the write.
* @param writableObjects Objects to write from.
* @return Path and stat information of the znode.
*/
public static PathStat writeToZnode(ZooKeeperExt zkExt,
String zkPath,
int version,
Writable... writableObjects) {
try {
byte[] byteArray = writeToByteArray(writableObjects);
return zkExt.createOrSetExt(zkPath,
byteArray,
Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT,
true,
version);
} catch (KeeperException e) {
throw new IllegalStateException(
"writeToZnode: KeeperException on " + zkPath, e);
} catch (InterruptedException e) {
throw new IllegalStateException(
"writeToZnode: InterruptedException on " + zkPath, e);
}
}
/**
* Write list of object to a byte array.
*
* @param writableList List of object to write from.
* @return Byte array with serialized objects.
*/
public static byte[] writeListToByteArray(
List<? extends Writable> writableList) {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
DataOutput output = new DataOutputStream(outputStream);
try {
output.writeInt(writableList.size());
for (Writable writable : writableList) {
writable.write(output);
}
} catch (IOException e) {
throw new IllegalStateException(
"writeListToByteArray: IOException", e);
}
return outputStream.toByteArray();
}
  /**
   * Write list of objects to a ZooKeeper znode.
   *
   * @param zkExt ZooKeeper instance.
   * @param zkPath Path of znode.
   * @param version Version of the write.
   * @param writableList List of objects to write from.
   * @return Path and stat information of the znode.
   */
  public static PathStat writeListToZnode(
      ZooKeeperExt zkExt,
      String zkPath,
      int version,
      List<? extends Writable> writableList) {
    try {
      return zkExt.createOrSetExt(
          zkPath,
          writeListToByteArray(writableList),
          Ids.OPEN_ACL_UNSAFE,
          CreateMode.PERSISTENT,
          true,
          version);
    } catch (KeeperException e) {
      throw new IllegalStateException(
          "writeListToZnode: KeeperException on " + zkPath, e);
    } catch (InterruptedException e) {
      // NOTE(review): the interrupt status is not restored here —
      // consider Thread.currentThread().interrupt() before rethrowing.
      throw new IllegalStateException(
          "writeListToZnode: InterruptedException on " + zkPath, e);
    }
  }
/**
* Read fields from byteArray to a list of objects.
*
* @param byteArray Byte array to find the fields in.
* @param writableClass Class of the objects to instantiate.
* @param conf Configuration used for instantiation (i.e Configurable)
* @param <T> Object type
* @return List of objects.
*/
public static <T extends Writable> List<T> readListFieldsFromByteArray(
byte[] byteArray,
Class<? extends T> writableClass,
Configuration conf) {
try {
DataInputStream inputStream =
new DataInputStream(new ByteArrayInputStream(byteArray));
int size = inputStream.readInt();
List<T> writableList = new ArrayList<T>(size);
for (int i = 0; i < size; ++i) {
T writable = newInstance(writableClass, conf);
writable.readFields(inputStream);
writableList.add(writable);
}
return writableList;
} catch (IOException e) {
throw new IllegalStateException(
"readListFieldsFromZnode: IOException", e);
}
}
/**
* Read fields from a ZooKeeper znode into a list of objects.
*
* @param zkExt ZooKeeper instance.
* @param zkPath Path of znode.
* @param watch Add a watch?
* @param stat Stat of znode if desired.
* @param writableClass Class of the objects to instantiate.
* @param conf Configuration used for instantiation (i.e Configurable)
* @param <T> Object type
* @return List of objects.
*/
public static <T extends Writable> List<T> readListFieldsFromZnode(
ZooKeeperExt zkExt,
String zkPath,
boolean watch,
Stat stat,
Class<? extends T> writableClass,
Configuration conf) {
try {
byte[] zkData = zkExt.getData(zkPath, false, stat);
return WritableUtils.<T>readListFieldsFromByteArray(zkData,
writableClass, conf);
} catch (KeeperException e) {
throw new IllegalStateException(
"readListFieldsFromZnode: KeeperException on " + zkPath, e);
} catch (InterruptedException e) {
throw new IllegalStateException(
"readListFieldsFromZnode: InterruptedException on " + zkPath,
e);
}
}
  /**
   * Write ExtendedDataOutput to DataOutput: the current position (i.e. the
   * number of valid bytes) followed by exactly that many bytes.
   *
   * @param extendedDataOutput ExtendedDataOutput to write
   * @param out DataOutput to write to
   * @throws IOException on stream failure
   */
  public static void writeExtendedDataOutput(
      ExtendedDataOutput extendedDataOutput, DataOutput out)
    throws IOException {
    out.writeInt(extendedDataOutput.getPos());
    out.write(
        extendedDataOutput.getByteArray(), 0, extendedDataOutput.getPos());
  }
  /**
   * Read ExtendedDataOutput from DataInput.
   * Matches {@link #writeExtendedDataOutput(ExtendedDataOutput, DataOutput)}.
   *
   * @param in DataInput to read from
   * @param conf Configuration
   * @return ExtendedDataOutput read
   * @throws IOException on stream failure
   */
  public static ExtendedDataOutput readExtendedDataOutput(DataInput in,
      ImmutableClassesGiraphConfiguration conf) throws IOException {
    int size = in.readInt();
    byte[] buf = new byte[size];
    in.readFully(buf);
    return conf.createExtendedDataOutput(buf, size);
  }
/**
* Write vertex data to byte array with the first 4 bytes as the size of the
* entire buffer (including the size).
*
* @param vertex Vertex to write from.
* @param buffer Use this buffer instead
* @param unsafe Use unsafe serialization?
* @param conf Configuration
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @return Byte array with serialized object.
*/
public static <I extends WritableComparable, V extends Writable,
E extends Writable> byte[] writeVertexToByteArray(
Vertex<I, V, E> vertex,
byte[] buffer,
boolean unsafe,
ImmutableClassesGiraphConfiguration<I, V, E> conf) {
ExtendedDataOutput extendedDataOutput;
if (unsafe) {
extendedDataOutput = new UnsafeByteArrayOutputStream(buffer);
} else {
extendedDataOutput = new ExtendedByteArrayDataOutput(buffer);
}
try {
extendedDataOutput.writeInt(-1);
writeVertexToDataOutput(extendedDataOutput, vertex, conf);
extendedDataOutput.writeInt(0, extendedDataOutput.getPos());
} catch (IOException e) {
throw new IllegalStateException("writeVertexToByteArray: " +
"IOException", e);
}
return extendedDataOutput.getByteArray();
}
  /**
   * Write vertex data to byte array with the first 4 bytes as the size of the
   * entire buffer (including the size).
   *
   * <p>Delegates to the buffer-taking overload with no reusable buffer.
   *
   * @param vertex Vertex to write from.
   * @param unsafe Use unsafe serialization?
   * @param conf Configuration
   * @param <I> Vertex id
   * @param <V> Vertex value
   * @param <E> Edge value
   * @return Byte array with serialized object.
   */
  public static <I extends WritableComparable, V extends Writable,
      E extends Writable> byte[] writeVertexToByteArray(
      Vertex<I, V, E> vertex,
      boolean unsafe,
      ImmutableClassesGiraphConfiguration<I, V, E> conf) {
    return writeVertexToByteArray(vertex, null, unsafe, conf);
  }
/**
* Read vertex data from byteArray to a Writeable object, skipping the size.
* Serialization method is choosable. Assumes the vertex has already been
* initialized and contains values for Id, value, and edges.
*
* @param byteArray Byte array to find the fields in.
* @param vertex Vertex to fill in the fields.
* @param unsafe Use unsafe deserialization
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @param conf Configuration
*/
public static <I extends WritableComparable, V extends Writable,
E extends Writable> void reinitializeVertexFromByteArray(
byte[] byteArray,
Vertex<I, V, E> vertex,
boolean unsafe,
ImmutableClassesGiraphConfiguration<I, V, E> conf) {
ExtendedDataInput extendedDataInput;
if (unsafe) {
extendedDataInput = new UnsafeByteArrayInputStream(byteArray);
} else {
extendedDataInput = new ExtendedByteArrayDataInput(byteArray);
}
try {
extendedDataInput.readInt();
reinitializeVertexFromDataInput(extendedDataInput, vertex, conf);
} catch (IOException e) {
throw new IllegalStateException(
"readFieldsFromByteArrayWithSize: IOException", e);
}
}
  /**
   * Write an edge to an output stream: target vertex id first, then the
   * edge value.
   *
   * @param out Data output
   * @param edge Edge to write
   * @param <I> Vertex id
   * @param <E> Edge value
   * @throws IOException on stream failure
   */
  public static <I extends WritableComparable, E extends Writable>
  void writeEdge(DataOutput out, Edge<I, E> edge) throws IOException {
    edge.getTargetVertexId().write(out);
    edge.getValue().write(out);
  }
  /**
   * Read an edge from an input stream, in the order written by
   * {@link #writeEdge(DataOutput, Edge)}.
   *
   * @param in Data input
   * @param edge Edge to fill in-place
   * @param <I> Vertex id
   * @param <E> Edge value
   * @throws IOException on stream failure
   */
  public static <I extends WritableComparable, E extends Writable>
  void readEdge(DataInput in, Edge<I, E> edge) throws IOException {
    edge.getTargetVertexId().readFields(in);
    edge.getValue().readFields(in);
  }
/**
* Reads data from input stream to initialize Vertex. Assumes the vertex has
* already been initialized and contains values for Id, value, and edges.
*
* @param input The input stream
* @param vertex The vertex to initialize
* @param conf Configuration
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @throws IOException
*/
@SuppressWarnings("unchecked")
public static <I extends WritableComparable, V extends Writable,
E extends Writable> void reinitializeVertexFromDataInput(
DataInput input,
Vertex<I, V, E> vertex,
ImmutableClassesGiraphConfiguration<I, V, E> conf)
throws IOException {
vertex.getId().readFields(input);
vertex.getValue().readFields(input);
((OutEdges<I, E>) vertex.getEdges()).readFields(input);
if (input.readBoolean()) {
vertex.voteToHalt();
} else {
vertex.wakeUp();
}
}
/**
* Reads data from input stream to initialize Vertex.
*
* @param input The input stream
* @param conf Configuration
* @param <I> Vertex id
* @param <V> Vertex value
* @param <E> Edge value
* @return The vertex
* @throws IOException
*/
public static <I extends WritableComparable, V extends Writable,
E extends Writable> Vertex<I, V, E>
readVertexFromDataInput(
DataInput input,
ImmutableClassesGiraphConfiguration<I, V, E> conf)
throws IOException {
Vertex<I, V, E> vertex = conf.createVertex();
I id = conf.createVertexId();
V value = conf.createVertexValue();
OutEdges<I, E> edges = conf.createOutEdges();
vertex.initialize(id, value, edges);
reinitializeVertexFromDataInput(input, vertex, conf);
return vertex;
}
  /**
   * Writes Vertex data to output stream: id, value, edges, then the halted
   * flag (matches the read order in reinitializeVertexFromDataInput).
   *
   * @param output the output stream
   * @param vertex The vertex to serialize
   * @param conf Configuration
   * @param <I> Vertex id
   * @param <V> Vertex value
   * @param <E> Edge value
   * @throws IOException on stream failure
   */
  @SuppressWarnings("unchecked")
  public static <I extends WritableComparable, V extends Writable,
      E extends Writable> void writeVertexToDataOutput(
      DataOutput output,
      Vertex<I, V, E> vertex,
      ImmutableClassesGiraphConfiguration<I, V, E> conf)
    throws IOException {
    vertex.getId().write(output);
    vertex.getValue().write(output);
    ((OutEdges<I, E>) vertex.getEdges()).write(output);
    output.writeBoolean(vertex.isHalted());
  }
/**
* Write class to data output. Also handles the case when class is null.
*
* @param clazz Class
* @param output Data output
* @param <T> Class type
*/
public static <T> void writeClass(Class<T> clazz,
DataOutput output) throws IOException {
output.writeBoolean(clazz != null);
if (clazz != null) {
output.writeUTF(clazz.getName());
}
}
/**
* Read class from data input.
* Matches {@link #writeClass(Class, DataOutput)}.
*
* @param input Data input
* @param <T> Class type
* @return Class, or null if null was written
*/
@SuppressWarnings("unchecked")
public static <T> Class<T> readClass(DataInput input) throws IOException {
if (input.readBoolean()) {
String className = input.readUTF();
try {
return (Class<T>) Class.forName(className);
} catch (ClassNotFoundException e) {
throw new IllegalStateException("readClass: No class found " +
className);
}
} else {
return null;
}
}
  /**
   * Write object to output stream: a presence flag, then (if present) the
   * object's class name followed by its serialized fields.
   *
   * @param object Object
   * @param output Output stream
   * @throws IOException on stream failure
   */
  public static void writeWritableObject(
    Writable object, DataOutput output)
    throws IOException {
    output.writeBoolean(object != null);
    if (object != null) {
      output.writeUTF(object.getClass().getName());
      object.write(output);
    }
  }
/**
* Reads object from input stream
* @param input Input stream
* @param conf Configuration
* @param <T> Object type
* @return Object
* @throws IOException
*/
public static <T extends Writable>
T readWritableObject(DataInput input,
ImmutableClassesGiraphConfiguration conf) throws IOException {
if (input.readBoolean()) {
String className = input.readUTF();
try {
T object =
(T) ReflectionUtils.newInstance(Class.forName(className), conf);
object.readFields(input);
return object;
} catch (ClassNotFoundException e) {
throw new IllegalStateException("readWritableObject: No class found " +
className);
}
} else {
return null;
}
}
  /**
   * Writes a list of Writable objects into output stream.
   * This method is trying to optimize space occupied by class information only
   * storing class object if it is different from the previous one
   * as in most cases arrays tend to have objects of the same type inside.
   *
   * <p>Wire format: [non-null list flag][size] then per element:
   * [null flag (true = null)][class-change flag][class, if changed][fields].
   *
   * @param list serialized object
   * @param output the output stream
   * @throws IOException on stream failure
   */
  public static void writeList(List<? extends Writable> list, DataOutput output)
    throws IOException {
    output.writeBoolean(list != null);
    if (list != null) {
      output.writeInt(list.size());
      Class<? extends Writable> clazz = null;
      for (Writable element : list) {
        // Note: writes 'true' when the element IS null (readList inverts this).
        output.writeBoolean(element == null);
        if (element != null) {
          if (element.getClass() != clazz) {
            // Class differs from the previous element: emit class info once.
            clazz = element.getClass();
            output.writeBoolean(true);
            writeClass(clazz, output);
          } else {
            output.writeBoolean(false);
          }
          element.write(output);
        }
      }
    }
  }
/**
 * Reads list of Writable objects from data input stream.
 * Input stream should have class information along with object data.
 * @param input input stream
 * @return deserialized list
 * @throws IOException
 */
public static List<? extends Writable> readList(DataInput input)
    throws IOException {
  try {
    List<Writable> res = null;
    if (input.readBoolean()) {
      int size = input.readInt();
      res = new ArrayList<>(size);
      // Class info is only present in the stream when an element's class
      // differs from the previous element's (see writeList), so the last
      // seen class is cached and reused.
      Class<? extends Writable> clazz = null;
      for (int i = 0; i < size; i++) {
        boolean isNull = input.readBoolean();
        if (isNull) {
          res.add(null);
        } else {
          boolean hasClassInfo = input.readBoolean();
          if (hasClassInfo) {
            clazz = readClass(input);
          }
          // NOTE(review): assumes the stream always carries class info for
          // the first non-null element (writeList guarantees this); a
          // corrupt stream would NPE here.
          Writable element = clazz.newInstance();
          element.readFields(input);
          res.add(element);
        }
      }
    }
    // null here means a null list was written, not an empty one.
    return res;
  } catch (InstantiationException | IllegalAccessException e) {
    throw new IllegalStateException("unable to instantiate object", e);
  }
}
/**
 * Writes a primitive int array into the output stream.
 * The array may be null or empty; null is encoded as length -1.
 * @param array array to be written
 * @param dataOutput output stream
 * @throws IOException
 */
public static void writeIntArray(int[] array, DataOutput dataOutput)
    throws IOException {
  if (array == null) {
    dataOutput.writeInt(-1);
    return;
  }
  dataOutput.writeInt(array.length);
  for (int value : array) {
    dataOutput.writeInt(value);
  }
}
/**
 * Reads a primitive int array from the input stream.
 * A stored length of -1 decodes to null.
 * @param dataInput input stream to read from
 * @return may return null or empty array.
 * @throws IOException
 */
public static int[] readIntArray(DataInput dataInput)
    throws IOException {
  int length = dataInput.readInt();
  if (length < 0) {
    return null;
  }
  int[] result = new int[length];
  for (int i = 0; i < length; i++) {
    result[i] = dataInput.readInt();
  }
  return result;
}
/**
 * Writes a primitive long array into the output stream.
 * The array may be null or empty; null is encoded as length -1.
 * @param dataOutput output stream
 * @param array array to be written
 * @throws IOException
 */
public static void writeLongArray(DataOutput dataOutput, long[] array)
    throws IOException {
  if (array == null) {
    dataOutput.writeInt(-1);
    return;
  }
  dataOutput.writeInt(array.length);
  for (long value : array) {
    dataOutput.writeLong(value);
  }
}
/**
 * Reads a primitive long array from the input stream.
 * A stored length of -1 decodes to null.
 * @param dataInput input stream to read from
 * @return may return null or empty array.
 * @throws IOException
 */
public static long[] readLongArray(DataInput dataInput)
    throws IOException {
  int length = dataInput.readInt();
  if (length < 0) {
    return null;
  }
  long[] result = new long[length];
  for (int i = 0; i < length; i++) {
    result[i] = dataInput.readLong();
  }
  return result;
}
/**
 * Writes an enum into the stream: first its declaring class, then its
 * ordinal as an unsigned varint. A null enum is encoded as a null class
 * with no ordinal.
 * @param enumValue Enum value
 * @param output Output stream
 * @param <T> Enum type
 */
public static <T extends Enum<T>> void writeEnum(T enumValue,
    DataOutput output) throws IOException {
  Class<T> declaringClass =
      enumValue == null ? null : enumValue.getDeclaringClass();
  writeClass(declaringClass, output);
  if (enumValue != null) {
    Varint.writeUnsignedVarInt(enumValue.ordinal(), output);
  }
}
/**
 * Reads an enum from the stream, serialized by writeEnum.
 * @param input Input stream
 * @param <T> Enum type
 * @return Enum value, or null if a null enum was written
 * @throws IOException if the stream does not describe a valid enum constant
 */
public static <T extends Enum<T>> T readEnum(DataInput input) throws
    IOException {
  Class<T> clazz = readClass(input);
  if (clazz == null) {
    return null;
  }
  int ordinal = Varint.readUnsignedVarInt(input);
  // getEnumConstants() replaces the previous reflective values() invocation:
  // it is simpler, cached by Class, and throws no checked reflection errors.
  T[] values = clazz.getEnumConstants();
  if (values == null) {
    // readClass produced a non-enum class - the stream is corrupt.
    throw new IOException("Cannot read enum: " + clazz + " is not an enum");
  }
  if (ordinal < 0 || ordinal >= values.length) {
    // Guard the lookup so a corrupt ordinal surfaces as an IOException
    // instead of an ArrayIndexOutOfBoundsException.
    throw new IOException("Cannot read enum: ordinal " + ordinal +
        " out of range for " + clazz);
  }
  return values[ordinal];
}
/**
 * Copy {@code from} into {@code to}, by serializing and deserializing it.
 * Since it is creating streams inside, it's mostly useful for
 * tests/non-performant code.
 *
 * @param from Object to copy from
 * @param to Object to copy into
 * @param <T> Type of the object
 */
public static <T extends Writable> void copyInto(T from, T to) {
  // Delegates with over-read checking disabled (no trailing guard byte).
  copyInto(from, to, false);
}
/**
 * Copy {@code from} into {@code to}, by serializing and deserializing it.
 * Since it is creating streams inside, it's mostly useful for
 * tests/non-performant code.
 *
 * @param from Object to copy from
 * @param to Object to copy into
 * @param checkOverRead if true, will add one more byte at the end of writing,
 *                      to make sure read is not touching it. Useful for tests
 * @param <T> Type of the object
 */
public static <T extends Writable> void copyInto(
    T from, T to, boolean checkOverRead) {
  // Both objects must be of exactly the same runtime class.
  if (from.getClass() != to.getClass()) {
    throw new RuntimeException(
        "Trying to copy from " + from.getClass() +
        " into " + to.getClass());
  }
  try {
    UnsafeByteArrayOutputStream out = new UnsafeByteArrayOutputStream();
    from.write(out);
    if (checkOverRead) {
      // Guard byte: readFields must not consume it.
      out.writeByte(0);
    }
    UnsafeByteArrayInputStream in =
        new UnsafeByteArrayInputStream(out.getByteArray(), 0, out.getPos());
    to.readFields(in);
    int expectedLeftover = checkOverRead ? 1 : 0;
    if (in.available() != expectedLeftover) {
      throw new RuntimeException(
          "Serialization encountered issues with " + from.getClass() + ", " +
          (in.available() - expectedLeftover) + " fewer bytes read");
    }
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Create a copy of Writable object, by serializing and deserializing it.
 *
 * @param reusableOut Reusable output stream to serialize into
 * @param reusableIn Reusable input stream to deserialize out of
 * @param original Original value of which to make a copy
 * @param conf Configuration
 * @param <T> Type of the object
 * @return Copy of the original value
 */
public static <T extends Writable> T createCopy(
    UnsafeByteArrayOutputStream reusableOut,
    UnsafeReusableByteArrayInput reusableIn, T original,
    ImmutableClassesGiraphConfiguration conf) {
  T duplicate = (T) createWritable(original.getClass(), conf);
  try {
    reusableOut.reset();
    original.write(reusableOut);
    reusableIn.initialize(
        reusableOut.getByteArray(), 0, reusableOut.getPos());
    duplicate.readFields(reusableIn);
    int leftover = reusableIn.available();
    // Every serialized byte must have been consumed by readFields.
    if (leftover != 0) {
      throw new RuntimeException("Serialization of " +
          original.getClass() + " encountered issues, " +
          leftover + " bytes left to be read");
    }
  } catch (IOException e) {
    throw new IllegalStateException(
        "IOException occurred while trying to create a copy " +
        original.getClass(), e);
  }
  return duplicate;
}
/**
 * Create a copy of Writable object, by serializing and deserializing it.
 *
 * @param original Original value of which to make a copy
 * @return Copy of the original value
 * @param <T> Type of the object
 */
public static final <T extends Writable> T createCopy(T original) {
  // The unchecked cast is safe in practice: the copy is instantiated from
  // original.getClass(), so its runtime type matches original's.
  return (T) createCopy(original, original.getClass(), null);
}
/**
 * Create a copy of Writable object, by serializing and deserializing it.
 *
 * @param original Original value of which to make a copy
 * @param outputClass Expected copy class, needs to match original
 * @param conf Configuration
 * @param <T> Type of the object
 * @return Copy of the original value
 */
public static final <T extends Writable>
T createCopy(T original, Class<? extends T> outputClass,
    ImmutableClassesGiraphConfiguration conf) {
  T copy = WritableUtils.createWritable(outputClass, conf);
  copyInto(original, copy);
  return copy;
}
/**
 * Create a copy of Writable object, by serializing and deserializing it.
 *
 * @param original Original value of which to make a copy
 * @param classFactory Factory to create new empty object from
 * @param conf Configuration
 * @param <T> Type of the object
 * @return Copy of the original value
 */
public static final <T extends Writable>
T createCopy(T original, ValueFactory<T> classFactory,
    ImmutableClassesGiraphConfiguration conf) {
  T copy = classFactory.newInstance();
  copyInto(original, copy);
  return copy;
}
/**
 * Serialize the given writable object and report its serialized size.
 *
 * @param w Writable object
 * @return number of bytes the object occupies after serialization
 */
public static int size(Writable w) {
  ExtendedByteArrayDataOutput counter = new ExtendedByteArrayDataOutput();
  try {
    w.write(counter);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  return counter.getPos();
}
/**
 * Serialize given writable to byte array,
 * using new instance of ExtendedByteArrayDataOutput.
 *
 * @param w Writable object
 * @return array of bytes
 * @param <T> Type of the object
 */
public static <T extends Writable> byte[] toByteArray(T w) {
  ExtendedByteArrayDataOutput buffer = new ExtendedByteArrayDataOutput();
  try {
    w.write(buffer);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  return buffer.toByteArray();
}
/**
 * Deserialize from given byte array into given writable,
 * using new instance of ExtendedByteArrayDataInput.
 *
 * @param data Byte array representing writable
 * @param to Object to fill
 * @param <T> Type of the object
 */
public static <T extends Writable> void fromByteArray(byte[] data, T to) {
  try {
    ExtendedByteArrayDataInput in =
        new ExtendedByteArrayDataInput(data, 0, data.length);
    to.readFields(in);
    int unread = in.available();
    // The whole array must be consumed, otherwise data and object disagree.
    if (unread != 0) {
      throw new RuntimeException(
          "Serialization encountered issues, " + unread +
          " bytes left to be read");
    }
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Serialize given writable to byte array,
 * using new instance of UnsafeByteArrayOutputStream.
 *
 * @param w Writable object
 * @return array of bytes
 * @param <T> Type of the object
 */
public static <T extends Writable> byte[] toByteArrayUnsafe(T w) {
  try {
    UnsafeByteArrayOutputStream buffer = new UnsafeByteArrayOutputStream();
    w.write(buffer);
    return buffer.toByteArray();
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Deserialize from given byte array into given writable,
 * using given reusable UnsafeReusableByteArrayInput.
 *
 * @param data Byte array representing writable
 * @param to Object to fill
 * @param reusableInput Reusable input to use
 * @param <T> Type of the object
 */
public static <T extends Writable> void fromByteArrayUnsafe(
    byte[] data, T to, UnsafeReusableByteArrayInput reusableInput) {
  try {
    reusableInput.initialize(data, 0, data.length);
    to.readFields(reusableInput);
    int unread = reusableInput.available();
    // The whole array must be consumed, otherwise data and object disagree.
    if (unread != 0) {
      throw new RuntimeException(
          "Serialization encountered issues, " + unread +
          " bytes left to be read");
    }
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
/**
 * First write a boolean saying whether an object is not null,
 * and if it's not write the object
 *
 * @param object Object to write
 * @param out DataOutput to write to
 * @param <T> Object type
 */
public static <T extends Writable> void writeIfNotNullAndObject(T object,
    DataOutput out) throws IOException {
  boolean present = object != null;
  out.writeBoolean(present);
  if (present) {
    object.write(out);
  }
}
/**
 * First read a boolean saying whether an object is not null,
 * and if it's not read the object
 *
 * @param reusableObject Reuse this object instance
 * @param objectClass Class of the object, to create if reusableObject is null
 * @param in DataInput to read from
 * @param <T> Object type
 * @return Object, or null
 */
public static <T extends Writable> T readIfNotNullAndObject(T reusableObject,
    Class<T> objectClass, DataInput in) throws IOException {
  if (!in.readBoolean()) {
    return null;
  }
  // Reuse the provided instance when possible, otherwise create a fresh one.
  T target = reusableObject != null
      ? reusableObject
      : ReflectionUtils.newInstance(objectClass);
  target.readFields(in);
  return target;
}
}
|
apache/hbase | 36,521 | hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.util.Comparator;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
/**
* Compare two HBase cells. Do not use this method comparing <code>-ROOT-</code> or
* <code>hbase:meta</code> cells. Cells from these tables need a specialized comparator, one that
* takes account of the special formatting of the row where we have commas to delimit table from
* regionname, from row. See KeyValue for how it has a special comparator to do hbase:meta cells and
* yet another for -ROOT-.
* <p>
* While using this comparator for {{@link #compareRows(Cell, Cell)} et al, the hbase:meta cells
* format should be taken into consideration, for which the instance of this comparator should be
* used. In all other cases the static APIs in this comparator would be enough
* <p>
* HOT methods. We spend a good portion of CPU comparing. Anything that makes the compare faster
* will likely manifest at the macro level.
* </p>
*/
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "UNKNOWN",
justification = "Findbugs doesn't like the way we are negating the result of"
+ " a compare in below")
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class CellComparatorImpl implements CellComparator {
private static final long serialVersionUID = 8186411895799094989L;
/**
* Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion of
* KeyValue only.
*/
public static final CellComparatorImpl COMPARATOR = new CellComparatorImpl();
/** Compares two cells fully, including the sequence id. */
@Override
public final int compare(final Cell a, final Cell b) {
  return compare(a, b, false);
}
/**
 * Compares two cells, optionally ignoring the sequence id.
 * <p>
 * The instanceof branches below are typed fast paths for the common
 * KeyValue / ByteBufferKeyValue pairs (see the class comment: these are HOT
 * methods); the final else-branch is the generic path for any other Cell
 * implementation.
 */
@Override
public int compare(final Cell l, final Cell r, boolean ignoreSequenceid) {
  int diff = 0;
  // "Peel off" the most common path.
  if (l instanceof KeyValue && r instanceof KeyValue) {
    diff = compareKeyValues((KeyValue) l, (KeyValue) r);
    if (diff != 0) {
      return diff;
    }
  } else if (l instanceof KeyValue && r instanceof ByteBufferKeyValue) {
    diff = compareKVVsBBKV((KeyValue) l, (ByteBufferKeyValue) r);
    if (diff != 0) {
      return diff;
    }
  } else if (l instanceof ByteBufferKeyValue && r instanceof KeyValue) {
    // Only a KV-vs-BBKV helper exists, so the arguments are swapped and the
    // result negated to restore the original orientation.
    diff = compareKVVsBBKV((KeyValue) r, (ByteBufferKeyValue) l);
    if (diff != 0) {
      // negate- Findbugs will complain?
      return -diff;
    }
  } else if (l instanceof ByteBufferKeyValue && r instanceof ByteBufferKeyValue) {
    diff = compareBBKV((ByteBufferKeyValue) l, (ByteBufferKeyValue) r);
    if (diff != 0) {
      return diff;
    }
  } else {
    // Generic path for arbitrary Cell implementations.
    int leftRowLength = l.getRowLength();
    int rightRowLength = r.getRowLength();
    diff = compareRows(l, leftRowLength, r, rightRowLength);
    if (diff != 0) {
      return diff;
    }
    diff = compareWithoutRow(l, r);
    if (diff != 0) {
      return diff;
    }
  }
  if (ignoreSequenceid) {
    return diff;
  }
  // Negate following comparisons so later edits show up first mvccVersion: later sorts first
  return Long.compare(PrivateCellUtil.getSequenceId(r), PrivateCellUtil.getSequenceId(l));
}
/**
 * Compares two array-backed {@link KeyValue}s field by field: row, the
 * special "minimum type with empty column" rule, family, qualifier,
 * timestamp (descending) and type (descending).
 * NOTE(review): near-identical copies of this logic exist in compareBBKV
 * and compareKVVsBBKV below; keep all three in sync.
 */
private int compareKeyValues(final KeyValue left, final KeyValue right) {
  int diff;
  // Compare Rows. Cache row length.
  int leftRowLength = left.getRowLength();
  int rightRowLength = right.getRowLength();
  diff = Bytes.compareTo(left.getRowArray(), left.getRowOffset(), leftRowLength,
    right.getRowArray(), right.getRowOffset(), rightRowLength);
  if (diff != 0) {
    return diff;
  }
  // If the column is not specified, the "minimum" key type appears as latest in the sorted
  // order, regardless of the timestamp. This is used for specifying the last key/value in a
  // given row, because there is no "lexicographically last column" (it would be infinitely
  // long).
  // The "maximum" key type does not need this behavior. Copied from KeyValue. This is bad in
  // that
  // we can't do memcmp w/ special rules like this.
  // TODO: Is there a test for this behavior?
  int leftFamilyLengthPosition = left.getFamilyLengthPosition(leftRowLength);
  int leftFamilyLength = left.getFamilyLength(leftFamilyLengthPosition);
  int leftKeyLength = left.getKeyLength();
  int leftQualifierLength =
    left.getQualifierLength(leftKeyLength, leftRowLength, leftFamilyLength);
  // No need of left row length below here.
  byte leftType = left.getTypeByte(leftKeyLength);
  if (
    leftType == KeyValue.Type.Minimum.getCode() && leftFamilyLength + leftQualifierLength == 0
  ) {
    // left is "bigger", i.e. it appears later in the sorted order
    return 1;
  }
  int rightFamilyLengthPosition = right.getFamilyLengthPosition(rightRowLength);
  int rightFamilyLength = right.getFamilyLength(rightFamilyLengthPosition);
  int rightKeyLength = right.getKeyLength();
  int rightQualifierLength =
    right.getQualifierLength(rightKeyLength, rightRowLength, rightFamilyLength);
  // No need of right row length below here.
  byte rightType = right.getTypeByte(rightKeyLength);
  if (
    rightType == KeyValue.Type.Minimum.getCode() && rightFamilyLength + rightQualifierLength == 0
  ) {
    return -1;
  }
  // Compare families.
  int leftFamilyPosition = left.getFamilyOffset(leftFamilyLengthPosition);
  int rightFamilyPosition = right.getFamilyOffset(rightFamilyLengthPosition);
  diff = compareFamilies(left, leftFamilyPosition, leftFamilyLength, right, rightFamilyPosition,
    rightFamilyLength);
  if (diff != 0) {
    return diff;
  }
  // Compare qualifiers
  diff = Bytes.compareTo(left.getQualifierArray(),
    left.getQualifierOffset(leftFamilyPosition, leftFamilyLength), leftQualifierLength,
    right.getQualifierArray(), right.getQualifierOffset(rightFamilyPosition, rightFamilyLength),
    rightQualifierLength);
  if (diff != 0) {
    return diff;
  }
  // Timestamps.
  // Swap order we pass into compare so we get DESCENDING order.
  // TODO : Ensure we read the bytes and do the compare instead of the value.
  diff = Long.compare(right.getTimestamp(rightKeyLength), left.getTimestamp(leftKeyLength));
  if (diff != 0) {
    return diff;
  }
  // Compare types. Let the delete types sort ahead of puts; i.e. types
  // of higher numbers sort before those of lesser numbers. Maximum (255)
  // appears ahead of everything, and minimum (0) appears after
  // everything.
  return (0xff & rightType) - (0xff & leftType);
}
/**
 * Compares two ByteBuffer-backed {@link ByteBufferKeyValue}s using the same
 * field order and special-case rules as compareKeyValues, but via zero-copy
 * ByteBuffer accessors.
 * NOTE(review): keep in sync with compareKeyValues and compareKVVsBBKV.
 */
private int compareBBKV(final ByteBufferKeyValue left, final ByteBufferKeyValue right) {
  int diff;
  // Compare Rows. Cache row length.
  int leftRowLength = left.getRowLength();
  int rightRowLength = right.getRowLength();
  diff = ByteBufferUtils.compareTo(left.getRowByteBuffer(), left.getRowPosition(), leftRowLength,
    right.getRowByteBuffer(), right.getRowPosition(), rightRowLength);
  if (diff != 0) {
    return diff;
  }
  // If the column is not specified, the "minimum" key type appears as latest in the sorted
  // order, regardless of the timestamp. This is used for specifying the last key/value in a
  // given row, because there is no "lexicographically last column" (it would be infinitely
  // long).
  // The "maximum" key type does not need this behavior. Copied from KeyValue. This is bad in
  // that
  // we can't do memcmp w/ special rules like this.
  // TODO: Is there a test for this behavior?
  int leftFamilyLengthPosition = left.getFamilyLengthPosition(leftRowLength);
  int leftFamilyLength = left.getFamilyLength(leftFamilyLengthPosition);
  int leftKeyLength = left.getKeyLength();
  int leftQualifierLength =
    left.getQualifierLength(leftKeyLength, leftRowLength, leftFamilyLength);
  // No need of left row length below here.
  byte leftType = left.getTypeByte(leftKeyLength);
  if (
    leftType == KeyValue.Type.Minimum.getCode() && leftFamilyLength + leftQualifierLength == 0
  ) {
    // left is "bigger", i.e. it appears later in the sorted order
    return 1;
  }
  int rightFamilyLengthPosition = right.getFamilyLengthPosition(rightRowLength);
  int rightFamilyLength = right.getFamilyLength(rightFamilyLengthPosition);
  int rightKeyLength = right.getKeyLength();
  int rightQualifierLength =
    right.getQualifierLength(rightKeyLength, rightRowLength, rightFamilyLength);
  // No need of right row length below here.
  byte rightType = right.getTypeByte(rightKeyLength);
  if (
    rightType == KeyValue.Type.Minimum.getCode() && rightFamilyLength + rightQualifierLength == 0
  ) {
    return -1;
  }
  // Compare families.
  int leftFamilyPosition = left.getFamilyPosition(leftFamilyLengthPosition);
  int rightFamilyPosition = right.getFamilyPosition(rightFamilyLengthPosition);
  diff = compareFamilies(left, leftFamilyPosition, leftFamilyLength, right, rightFamilyPosition,
    rightFamilyLength);
  if (diff != 0) {
    return diff;
  }
  // Compare qualifiers
  diff = ByteBufferUtils.compareTo(left.getQualifierByteBuffer(),
    left.getQualifierPosition(leftFamilyPosition, leftFamilyLength), leftQualifierLength,
    right.getQualifierByteBuffer(),
    right.getQualifierPosition(rightFamilyPosition, rightFamilyLength), rightQualifierLength);
  if (diff != 0) {
    return diff;
  }
  // Timestamps.
  // Swap order we pass into compare so we get DESCENDING order.
  diff = Long.compare(right.getTimestamp(rightKeyLength), left.getTimestamp(leftKeyLength));
  if (diff != 0) {
    return diff;
  }
  // Compare types. Let the delete types sort ahead of puts; i.e. types
  // of higher numbers sort before those of lesser numbers. Maximum (255)
  // appears ahead of everything, and minimum (0) appears after
  // everything.
  return (0xff & rightType) - (0xff & leftType);
}
/**
 * Compares an array-backed {@link KeyValue} (left) against a
 * ByteBuffer-backed {@link ByteBufferKeyValue} (right), same field order and
 * rules as compareKeyValues. Callers with the operands reversed negate the
 * result (see compare(Cell, Cell, boolean)).
 * NOTE(review): keep in sync with compareKeyValues and compareBBKV.
 */
private int compareKVVsBBKV(final KeyValue left, final ByteBufferKeyValue right) {
  int diff;
  // Compare Rows. Cache row length.
  int leftRowLength = left.getRowLength();
  int rightRowLength = right.getRowLength();
  diff = ByteBufferUtils.compareTo(left.getRowArray(), left.getRowOffset(), leftRowLength,
    right.getRowByteBuffer(), right.getRowPosition(), rightRowLength);
  if (diff != 0) {
    return diff;
  }
  // If the column is not specified, the "minimum" key type appears as latest in the sorted
  // order, regardless of the timestamp. This is used for specifying the last key/value in a
  // given row, because there is no "lexicographically last column" (it would be infinitely
  // long).
  // The "maximum" key type does not need this behavior. Copied from KeyValue. This is bad in
  // that
  // we can't do memcmp w/ special rules like this.
  // TODO: Is there a test for this behavior?
  int leftFamilyLengthPosition = left.getFamilyLengthPosition(leftRowLength);
  int leftFamilyLength = left.getFamilyLength(leftFamilyLengthPosition);
  int leftKeyLength = left.getKeyLength();
  int leftQualifierLength =
    left.getQualifierLength(leftKeyLength, leftRowLength, leftFamilyLength);
  // No need of left row length below here.
  byte leftType = left.getTypeByte(leftKeyLength);
  if (
    leftType == KeyValue.Type.Minimum.getCode() && leftFamilyLength + leftQualifierLength == 0
  ) {
    // left is "bigger", i.e. it appears later in the sorted order
    return 1;
  }
  int rightFamilyLengthPosition = right.getFamilyLengthPosition(rightRowLength);
  int rightFamilyLength = right.getFamilyLength(rightFamilyLengthPosition);
  int rightKeyLength = right.getKeyLength();
  int rightQualifierLength =
    right.getQualifierLength(rightKeyLength, rightRowLength, rightFamilyLength);
  // No need of right row length below here.
  byte rightType = right.getTypeByte(rightKeyLength);
  if (
    rightType == KeyValue.Type.Minimum.getCode() && rightFamilyLength + rightQualifierLength == 0
  ) {
    return -1;
  }
  // Compare families.
  int leftFamilyPosition = left.getFamilyOffset(leftFamilyLengthPosition);
  int rightFamilyPosition = right.getFamilyPosition(rightFamilyLengthPosition);
  diff = compareFamilies(left, leftFamilyPosition, leftFamilyLength, right, rightFamilyPosition,
    rightFamilyLength);
  if (diff != 0) {
    return diff;
  }
  // Compare qualifiers
  diff = ByteBufferUtils.compareTo(left.getQualifierArray(),
    left.getQualifierOffset(leftFamilyPosition, leftFamilyLength), leftQualifierLength,
    right.getQualifierByteBuffer(),
    right.getQualifierPosition(rightFamilyPosition, rightFamilyLength), rightQualifierLength);
  if (diff != 0) {
    return diff;
  }
  // Timestamps.
  // Swap order we pass into compare so we get DESCENDING order.
  diff = Long.compare(right.getTimestamp(rightKeyLength), left.getTimestamp(leftKeyLength));
  if (diff != 0) {
    return diff;
  }
  // Compare types. Let the delete types sort ahead of puts; i.e. types
  // of higher numbers sort before those of lesser numbers. Maximum (255)
  // appears ahead of everything, and minimum (0) appears after
  // everything.
  return (0xff & rightType) - (0xff & leftType);
}
/**
 * Compares the family and qualifier part of the cell
 * @return 0 if both cells are equal, a positive value if left cell is bigger than right,
 *         a negative value otherwise
 */
public final int compareColumns(final Cell left, final Cell right) {
  int familyDiff = compareFamilies(left, right);
  return familyDiff != 0 ? familyDiff : compareQualifiers(left, right);
}
/**
 * Compares family then qualifier, using lengths precomputed by the caller.
 * @return 0 if equal, positive if left sorts after right, negative otherwise
 */
private int compareColumns(final Cell left, final int leftFamLen, final int leftQualLen,
  final Cell right, final int rightFamLen, final int rightQualLen) {
  int familyDiff = compareFamilies(left, leftFamLen, right, rightFamLen);
  if (familyDiff != 0) {
    return familyDiff;
  }
  return compareQualifiers(left, leftQualLen, right, rightQualLen);
}
/**
 * This method will be overridden when we compare cells inner store to bypass family comparing.
 * Prefers zero-copy ByteBuffer access when a side is ByteBuffer-backed.
 */
protected int compareFamilies(Cell left, int leftFamLen, Cell right, int rightFamLen) {
  boolean rightIsByteBuffer = right instanceof ByteBufferExtendedCell;
  if (left instanceof ByteBufferExtendedCell) {
    ByteBufferExtendedCell bbLeft = (ByteBufferExtendedCell) left;
    if (rightIsByteBuffer) {
      ByteBufferExtendedCell bbRight = (ByteBufferExtendedCell) right;
      return ByteBufferUtils.compareTo(bbLeft.getFamilyByteBuffer(),
        bbLeft.getFamilyPosition(), leftFamLen, bbRight.getFamilyByteBuffer(),
        bbRight.getFamilyPosition(), rightFamLen);
    }
    return ByteBufferUtils.compareTo(bbLeft.getFamilyByteBuffer(),
      bbLeft.getFamilyPosition(), leftFamLen, right.getFamilyArray(),
      right.getFamilyOffset(), rightFamLen);
  }
  if (rightIsByteBuffer) {
    // Keep arguments in (left, right) order rather than negating the result;
    // see FindBugs RV_NEGATING_RESULT_OF_COMPARETO:
    // http://findbugs.sourceforge.net/bugDescriptions.html#RV_NEGATING_RESULT_OF_COMPARETO
    ByteBufferExtendedCell bbRight = (ByteBufferExtendedCell) right;
    return ByteBufferUtils.compareTo(left.getFamilyArray(), left.getFamilyOffset(), leftFamLen,
      bbRight.getFamilyByteBuffer(), bbRight.getFamilyPosition(), rightFamLen);
  }
  return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset(), leftFamLen,
    right.getFamilyArray(), right.getFamilyOffset(), rightFamLen);
}
/**
 * Compares only the qualifier portion of two cells, using lengths
 * precomputed by the caller. Prefers zero-copy ByteBuffer access when a
 * side is ByteBuffer-backed.
 */
private final int compareQualifiers(Cell left, int leftQualLen, Cell right, int rightQualLen) {
  if (left instanceof ByteBufferExtendedCell && right instanceof ByteBufferExtendedCell) {
    return ByteBufferUtils.compareTo(((ByteBufferExtendedCell) left).getQualifierByteBuffer(),
      ((ByteBufferExtendedCell) left).getQualifierPosition(), leftQualLen,
      ((ByteBufferExtendedCell) right).getQualifierByteBuffer(),
      ((ByteBufferExtendedCell) right).getQualifierPosition(), rightQualLen);
  }
  if (left instanceof ByteBufferExtendedCell) {
    return ByteBufferUtils.compareTo(((ByteBufferExtendedCell) left).getQualifierByteBuffer(),
      ((ByteBufferExtendedCell) left).getQualifierPosition(), leftQualLen,
      right.getQualifierArray(), right.getQualifierOffset(), rightQualLen);
  }
  if (right instanceof ByteBufferExtendedCell) {
    // Notice how we flip the order of the compare here. We used to negate the return value but
    // see what FindBugs says
    // http://findbugs.sourceforge.net/bugDescriptions.html#RV_NEGATING_RESULT_OF_COMPARETO
    // It suggest flipping the order to get same effect and 'safer'.
    return ByteBufferUtils.compareTo(left.getQualifierArray(), left.getQualifierOffset(),
      leftQualLen, ((ByteBufferExtendedCell) right).getQualifierByteBuffer(),
      ((ByteBufferExtendedCell) right).getQualifierPosition(), rightQualLen);
  }
  return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset(), leftQualLen,
    right.getQualifierArray(), right.getQualifierOffset(), rightQualLen);
}
/**
 * Compare the families of left and right cell
 * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise
 */
@Override
public final int compareFamilies(Cell left, Cell right) {
  // Same dispatch pattern as the length-taking overloads: use zero-copy
  // ByteBuffer access whenever a side is ByteBuffer-backed.
  if (left instanceof ByteBufferExtendedCell && right instanceof ByteBufferExtendedCell) {
    return ByteBufferUtils.compareTo(((ByteBufferExtendedCell) left).getFamilyByteBuffer(),
      ((ByteBufferExtendedCell) left).getFamilyPosition(), left.getFamilyLength(),
      ((ByteBufferExtendedCell) right).getFamilyByteBuffer(),
      ((ByteBufferExtendedCell) right).getFamilyPosition(), right.getFamilyLength());
  }
  if (left instanceof ByteBufferExtendedCell) {
    return ByteBufferUtils.compareTo(((ByteBufferExtendedCell) left).getFamilyByteBuffer(),
      ((ByteBufferExtendedCell) left).getFamilyPosition(), left.getFamilyLength(),
      right.getFamilyArray(), right.getFamilyOffset(), right.getFamilyLength());
  }
  if (right instanceof ByteBufferExtendedCell) {
    // Notice how we flip the order of the compare here. We used to negate the return value but
    // see what FindBugs says
    // http://findbugs.sourceforge.net/bugDescriptions.html#RV_NEGATING_RESULT_OF_COMPARETO
    // It suggest flipping the order to get same effect and 'safer'.
    return ByteBufferUtils.compareTo(left.getFamilyArray(), left.getFamilyOffset(),
      left.getFamilyLength(), ((ByteBufferExtendedCell) right).getFamilyByteBuffer(),
      ((ByteBufferExtendedCell) right).getFamilyPosition(), right.getFamilyLength());
  }
  return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(),
    right.getFamilyArray(), right.getFamilyOffset(), right.getFamilyLength());
}
/**
 * This method will be overridden when we compare cells inner store to bypass family comparing.
 * Both sides are array-backed {@link KeyValue}s; family positions and
 * lengths are precomputed by the caller.
 */
protected int compareFamilies(KeyValue left, int leftFamilyPosition, int leftFamilyLength,
  KeyValue right, int rightFamilyPosition, int rightFamilyLength) {
  return Bytes.compareTo(left.getFamilyArray(), leftFamilyPosition, leftFamilyLength,
    right.getFamilyArray(), rightFamilyPosition, rightFamilyLength);
}
/**
 * This method will be overridden when we compare cells inner store to bypass family comparing.
 * Both sides are ByteBuffer-backed {@link ByteBufferKeyValue}s; family
 * positions and lengths are precomputed by the caller.
 */
protected int compareFamilies(ByteBufferKeyValue left, int leftFamilyPosition,
  int leftFamilyLength, ByteBufferKeyValue right, int rightFamilyPosition,
  int rightFamilyLength) {
  return ByteBufferUtils.compareTo(left.getFamilyByteBuffer(), leftFamilyPosition,
    leftFamilyLength, right.getFamilyByteBuffer(), rightFamilyPosition, rightFamilyLength);
}
/**
 * This method will be overridden when we compare cells inner store to bypass family comparing.
 * Mixed representation: array-backed left versus ByteBuffer-backed right;
 * family positions and lengths are precomputed by the caller.
 */
protected int compareFamilies(KeyValue left, int leftFamilyPosition, int leftFamilyLength,
  ByteBufferKeyValue right, int rightFamilyPosition, int rightFamilyLength) {
  return ByteBufferUtils.compareTo(left.getFamilyArray(), leftFamilyPosition, leftFamilyLength,
    right.getFamilyByteBuffer(), rightFamilyPosition, rightFamilyLength);
}
/**
 * Compares only the qualifier portion of two array-backed KeyValues.
 * Row/family lengths are read solely to locate the qualifier; rows and
 * families themselves are NOT compared here.
 */
static int compareQualifiers(KeyValue left, KeyValue right) {
  // NOTE(review): near-identical typed variants of this method exist below
  // (and the comment in the original said a copy lives elsewhere too) -
  // intentionally duplicated rather than shared for compare throughput.
  // If changes here or there, make sure done in both places.
  // Cache row lengths; needed to locate family/qualifier offsets.
  int leftRowLength = left.getRowLength();
  int rightRowLength = right.getRowLength();
  int leftFamilyLengthPosition = left.getFamilyLengthPosition(leftRowLength);
  byte leftFamilyLength = left.getFamilyLength(leftFamilyLengthPosition);
  int leftKeyLength = left.getKeyLength();
  int leftQualifierLength =
    left.getQualifierLength(leftKeyLength, leftRowLength, leftFamilyLength);
  // No need of left row length below here.
  int rightFamilyLengthPosition = right.getFamilyLengthPosition(rightRowLength);
  byte rightFamilyLength = right.getFamilyLength(rightFamilyLengthPosition);
  int rightKeyLength = right.getKeyLength();
  int rightQualifierLength =
    right.getQualifierLength(rightKeyLength, rightRowLength, rightFamilyLength);
  // Locate families (used only as the base offset for qualifiers).
  int leftFamilyOffset = left.getFamilyOffset(leftFamilyLengthPosition);
  int rightFamilyOffset = right.getFamilyOffset(rightFamilyLengthPosition);
  // Compare qualifiers
  return Bytes.compareTo(left.getQualifierArray(), leftFamilyOffset + leftFamilyLength,
    leftQualifierLength, right.getQualifierArray(), rightFamilyOffset + rightFamilyLength,
    rightQualifierLength);
}
static int compareQualifiers(KeyValue left, ByteBufferKeyValue right) {
  // NOTE: Same method is in CellComparatorImpl, also private, not shared, intentionally; the
  // duplication buys a few percent of compare throughput. If this changes, change both copies.
  // Derive the left (array-backed) cell's qualifier coordinates from its serialized layout.
  int lRowLen = left.getRowLength();
  int lFamLenPos = left.getFamilyLengthPosition(lRowLen);
  byte lFamLen = left.getFamilyLength(lFamLenPos);
  int lQualLen = left.getQualifierLength(left.getKeyLength(), lRowLen, lFamLen);
  int lFamOffset = left.getFamilyOffset(lFamLenPos);
  // Derive the right (buffer-backed) cell's qualifier coordinates the same way.
  int rRowLen = right.getRowLength();
  int rFamLenPos = right.getFamilyLengthPosition(rRowLen);
  byte rFamLen = right.getFamilyLength(rFamLenPos);
  int rQualLen = right.getQualifierLength(right.getKeyLength(), rRowLen, rFamLen);
  int rFamPos = right.getFamilyPosition(rFamLenPos);
  // The qualifier starts immediately after the family bytes.
  return ByteBufferUtils.compareTo(left.getQualifierArray(), lFamOffset + lFamLen, lQualLen,
    right.getQualifierByteBuffer(), rFamPos + rFamLen, rQualLen);
}
static int compareQualifiers(ByteBufferKeyValue left, KeyValue right) {
  // NOTE: Same method is in CellComparatorImpl, also private, not shared, intentionally; the
  // duplication buys a few percent of compare throughput. If this changes, change both copies.
  // Derive the left (buffer-backed) cell's qualifier coordinates from its serialized layout.
  int lRowLen = left.getRowLength();
  int lFamLenPos = left.getFamilyLengthPosition(lRowLen);
  byte lFamLen = left.getFamilyLength(lFamLenPos);
  int lQualLen = left.getQualifierLength(left.getKeyLength(), lRowLen, lFamLen);
  int lFamPos = left.getFamilyPosition(lFamLenPos);
  // Derive the right (array-backed) cell's qualifier coordinates the same way.
  int rRowLen = right.getRowLength();
  int rFamLenPos = right.getFamilyLengthPosition(rRowLen);
  byte rFamLen = right.getFamilyLength(rFamLenPos);
  int rQualLen = right.getQualifierLength(right.getKeyLength(), rRowLen, rFamLen);
  int rFamOffset = right.getFamilyOffset(rFamLenPos);
  // The qualifier starts immediately after the family bytes.
  return ByteBufferUtils.compareTo(left.getQualifierByteBuffer(), lFamPos + lFamLen, lQualLen,
    right.getQualifierArray(), rFamOffset + rFamLen, rQualLen);
}
static int compareQualifiers(ByteBufferKeyValue left, ByteBufferKeyValue right) {
  // NOTE: Same method is in CellComparatorImpl, also private, not shared, intentionally; the
  // duplication buys a few percent of compare throughput. If this changes, change both copies.
  // Derive the left cell's qualifier coordinates from its serialized layout.
  int lRowLen = left.getRowLength();
  int lFamLenPos = left.getFamilyLengthPosition(lRowLen);
  byte lFamLen = left.getFamilyLength(lFamLenPos);
  int lQualLen = left.getQualifierLength(left.getKeyLength(), lRowLen, lFamLen);
  int lFamPos = left.getFamilyPosition(lFamLenPos);
  // Derive the right cell's qualifier coordinates the same way.
  int rRowLen = right.getRowLength();
  int rFamLenPos = right.getFamilyLengthPosition(rRowLen);
  byte rFamLen = right.getFamilyLength(rFamLenPos);
  int rQualLen = right.getQualifierLength(right.getKeyLength(), rRowLen, rFamLen);
  int rFamPos = right.getFamilyPosition(rFamLenPos);
  // The qualifier starts immediately after the family bytes.
  return ByteBufferUtils.compareTo(left.getQualifierByteBuffer(), lFamPos + lFamLen, lQualLen,
    right.getQualifierByteBuffer(), rFamPos + rFamLen, rQualLen);
}
/**
 * Compare the qualifiers part of the left and right cells.
 * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise
 */
@Override
public final int compareQualifiers(Cell left, Cell right) {
  // Fast paths: dispatch to the specialized static overloads when both concrete types
  // are known KeyValue/ByteBufferKeyValue implementations.
  boolean leftKV = left instanceof KeyValue;
  boolean rightKV = right instanceof KeyValue;
  boolean leftBBKV = left instanceof ByteBufferKeyValue;
  boolean rightBBKV = right instanceof ByteBufferKeyValue;
  if (leftBBKV && rightBBKV) {
    return compareQualifiers((ByteBufferKeyValue) left, (ByteBufferKeyValue) right);
  }
  if (leftKV && rightKV) {
    return compareQualifiers((KeyValue) left, (KeyValue) right);
  }
  if (leftKV && rightBBKV) {
    return compareQualifiers((KeyValue) left, (ByteBufferKeyValue) right);
  }
  if (leftBBKV && rightKV) {
    return compareQualifiers((ByteBufferKeyValue) left, (KeyValue) right);
  }
  // Slow path: arbitrary Cell implementations.
  if (left instanceof ByteBufferExtendedCell) {
    ByteBufferExtendedCell bbLeft = (ByteBufferExtendedCell) left;
    if (right instanceof ByteBufferExtendedCell) {
      ByteBufferExtendedCell bbRight = (ByteBufferExtendedCell) right;
      return ByteBufferUtils.compareTo(bbLeft.getQualifierByteBuffer(),
        bbLeft.getQualifierPosition(), left.getQualifierLength(), bbRight.getQualifierByteBuffer(),
        bbRight.getQualifierPosition(), right.getQualifierLength());
    }
    return ByteBufferUtils.compareTo(bbLeft.getQualifierByteBuffer(), bbLeft.getQualifierPosition(),
      left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(),
      right.getQualifierLength());
  }
  if (right instanceof ByteBufferExtendedCell) {
    // Operands are ordered (array vs buffer) instead of negating a flipped compare; see
    // FindBugs RV_NEGATING_RESULT_OF_COMPARETO for why this is the safer equivalent.
    ByteBufferExtendedCell bbRight = (ByteBufferExtendedCell) right;
    return ByteBufferUtils.compareTo(left.getQualifierArray(), left.getQualifierOffset(),
      left.getQualifierLength(), bbRight.getQualifierByteBuffer(), bbRight.getQualifierPosition(),
      right.getQualifierLength());
  }
  return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset(),
    left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(),
    right.getQualifierLength());
}
/**
 * Compares the rows of the left and right cell. For the hbase:meta case this method is overridden
 * such that it can handle hbase:meta cells. The caller should ensure using the appropriate
 * comparator for hbase:meta.
 * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise
 */
@Override
public int compareRows(final Cell left, final Cell right) {
  final int leftRowLength = left.getRowLength();
  final int rightRowLength = right.getRowLength();
  return compareRows(left, leftRowLength, right, rightRowLength);
}
static int compareRows(final Cell left, int leftRowLength, final Cell right, int rightRowLength) {
  // Identity shortcut: left and right can be exactly the same cell at the beginning of a row.
  if (left == right) {
    return 0;
  }
  if (left instanceof ByteBufferExtendedCell) {
    ByteBufferExtendedCell bbLeft = (ByteBufferExtendedCell) left;
    if (right instanceof ByteBufferExtendedCell) {
      ByteBufferExtendedCell bbRight = (ByteBufferExtendedCell) right;
      return ByteBufferUtils.compareTo(bbLeft.getRowByteBuffer(), bbLeft.getRowPosition(),
        leftRowLength, bbRight.getRowByteBuffer(), bbRight.getRowPosition(), rightRowLength);
    }
    return ByteBufferUtils.compareTo(bbLeft.getRowByteBuffer(), bbLeft.getRowPosition(),
      leftRowLength, right.getRowArray(), right.getRowOffset(), rightRowLength);
  }
  if (right instanceof ByteBufferExtendedCell) {
    // Operands are ordered (array first) instead of negating a flipped compare; see FindBugs
    // RV_NEGATING_RESULT_OF_COMPARETO for why flipping the order is the safer equivalent.
    ByteBufferExtendedCell bbRight = (ByteBufferExtendedCell) right;
    return ByteBufferUtils.compareTo(left.getRowArray(), left.getRowOffset(), leftRowLength,
      bbRight.getRowByteBuffer(), bbRight.getRowPosition(), rightRowLength);
  }
  return Bytes.compareTo(left.getRowArray(), left.getRowOffset(), leftRowLength,
    right.getRowArray(), right.getRowOffset(), rightRowLength);
}
/**
 * Compares the row part of the cell with a simple plain byte[] like the stopRow in Scan. For
 * hbase:meta cells the {@link MetaCellComparator#META_COMPARATOR} should be used instead.
 * @param left    the cell whose row is compared
 * @param right   the serialized key byte[] to compare against
 * @param roffset offset of the row bytes within {@code right}
 * @param rlength length of the row bytes within {@code right}
 * @return 0 if both cell and the byte[] are equal, 1 if the cell is bigger than byte[], -1
 *         otherwise
 */
@Override
public int compareRows(Cell left, byte[] right, int roffset, int rlength) {
  if (!(left instanceof ByteBufferExtendedCell)) {
    return Bytes.compareTo(left.getRowArray(), left.getRowOffset(), left.getRowLength(), right,
      roffset, rlength);
  }
  ByteBufferExtendedCell bbLeft = (ByteBufferExtendedCell) left;
  return ByteBufferUtils.compareTo(bbLeft.getRowByteBuffer(), bbLeft.getRowPosition(),
    left.getRowLength(), right, roffset, rlength);
}
/**
 * Compares everything but the row: family, then qualifier, then timestamp (descending), then
 * type code (descending). A cell whose column is empty and whose type is Minimum acts as a
 * "last key/value in this row" marker and sorts after everything else in the row.
 */
@Override
public final int compareWithoutRow(final Cell left, final Cell right) {
  // If the column is not specified, the "minimum" key type appears the
  // latest in the sorted order, regardless of the timestamp. This is used
  // for specifying the last key/value in a given row, because there is no
  // "lexicographically last column" (it would be infinitely long). The
  // "maximum" key type does not need this behavior.
  // Copied from KeyValue. This is bad in that we can't do memcmp w/ special rules like this.
  int lFamLength = left.getFamilyLength();
  int rFamLength = right.getFamilyLength();
  int lQualLength = left.getQualifierLength();
  int rQualLength = right.getQualifierLength();
  byte leftType = PrivateCellUtil.getTypeByte(left);
  byte rightType = PrivateCellUtil.getTypeByte(right);
  if (lFamLength + lQualLength == 0 && leftType == KeyValue.Type.Minimum.getCode()) {
    // left is "bigger", i.e. it appears later in the sorted order
    return 1;
  }
  if (rFamLength + rQualLength == 0 && rightType == KeyValue.Type.Minimum.getCode()) {
    // Symmetric case: right is the row-terminator marker, so left sorts first.
    return -1;
  }
  if (lFamLength != rFamLength) {
    // comparing column family is enough.
    return compareFamilies(left, lFamLength, right, rFamLength);
  }
  // Compare cf:qualifier
  int diff = compareColumns(left, lFamLength, lQualLength, right, rFamLength, rQualLength);
  if (diff != 0) {
    return diff;
  }
  // Same column: newer timestamps sort first (compareTimestamps is descending).
  diff = compareTimestamps(left.getTimestamp(), right.getTimestamp());
  if (diff != 0) {
    return diff;
  }
  // Compare types. Let the delete types sort ahead of puts; i.e. types
  // of higher numbers sort before those of lesser numbers. Maximum (255)
  // appears ahead of everything, and minimum (0) appears after
  // everything.
  return (0xff & rightType) - (0xff & leftType);
}
/** Compares the timestamps of two cells; delegates to the descending long comparison. */
@Override
public int compareTimestamps(final Cell left, final Cell right) {
  final long leftTs = left.getTimestamp();
  final long rightTs = right.getTimestamp();
  return compareTimestamps(leftTs, rightTs);
}
/** Orders timestamps DESCENDING: the larger (newer) timestamp sorts first. */
@Override
public int compareTimestamps(final long ltimestamp, final long rtimestamp) {
  // Equivalent to Long.compare(rtimestamp, ltimestamp): reversed operands give descending order.
  if (rtimestamp < ltimestamp) {
    return -1;
  }
  if (rtimestamp > ltimestamp) {
    return 1;
  }
  return 0;
}
/**
 * @return this instance itself, used directly as the plain {@code Comparator<Cell>}.
 */
@Override
public Comparator<Cell> getSimpleComparator() {
  return this;
}
/**
 * Utility method that makes a guess at comparator to use based off passed tableName. Use in
 * extreme when no comparator specified.
 * @param tableName the table whose name determines the comparator to use
 * @return CellComparator to use going off the {@code tableName} passed.
 */
public static CellComparator getCellComparator(TableName tableName) {
  return getCellComparator(tableName.toBytes());
}
/**
 * Utility method that makes a guess at comparator to use based off passed tableName. Use in
 * extreme when no comparator specified.
 * @param tableName serialized table name bytes
 * @return CellComparator to use going off the {@code tableName} passed.
 */
public static CellComparator getCellComparator(byte[] tableName) {
  // FYI, TableName.toBytes does not create an array; just returns existing array pointer.
  if (Bytes.equals(tableName, TableName.META_TABLE_NAME.toBytes())) {
    return MetaCellComparator.META_COMPARATOR;
  }
  return CellComparatorImpl.COMPARATOR;
}
}
|
apache/phoenix | 36,472 | phoenix-core-server/src/main/java/org/apache/phoenix/coprocessor/MetaDataRegionObserver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.coprocessor;
import static org.apache.phoenix.query.QueryServices.STATS_COLLECTION_ENABLED;
import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_STATS_COLLECTION_ENABLED;
import static org.apache.phoenix.schema.types.PDataType.TRUE_BYTES;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.TimerTask;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import javax.annotation.concurrent.GuardedBy;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.phoenix.cache.GlobalCache;
import org.apache.phoenix.compile.MutationPlan;
import org.apache.phoenix.compile.PostDDLCompiler;
import org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants;
import org.apache.phoenix.coprocessorclient.MetaDataProtocol;
import org.apache.phoenix.execute.MutationState;
import org.apache.phoenix.hbase.index.util.IndexManagementUtil;
import org.apache.phoenix.index.IndexMaintainer;
import org.apache.phoenix.index.PhoenixIndexCodec;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
import org.apache.phoenix.jdbc.PhoenixDriver;
import org.apache.phoenix.query.QueryConstants;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.query.QueryServicesOptions;
import org.apache.phoenix.schema.PIndexState;
import org.apache.phoenix.schema.PName;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.TableRef;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.util.ByteUtil;
import org.apache.phoenix.util.EnvironmentEdgeManager;
import org.apache.phoenix.util.IndexUtil;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.QueryUtil;
import org.apache.phoenix.util.ReadOnlyProps;
import org.apache.phoenix.util.ScanUtil;
import org.apache.phoenix.util.SchemaUtil;
import org.apache.phoenix.util.ServerUtil;
import org.apache.phoenix.util.UpgradeUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.phoenix.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.phoenix.thirdparty.com.google.common.collect.ImmutableList;
import org.apache.phoenix.thirdparty.com.google.common.collect.Lists;
import org.apache.phoenix.thirdparty.com.google.common.collect.Maps;
import org.apache.phoenix.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
* Coprocessor for metadata related operations. This coprocessor would only be registered to
* SYSTEM.TABLE.
*/
@SuppressWarnings("deprecation")
public class MetaDataRegionObserver implements RegionObserver, RegionCoprocessor {
// Public so the nested scheduled tasks in this file can log through the same logger.
public static final Logger LOGGER = LoggerFactory.getLogger(MetaDataRegionObserver.class);
// Marker string for rebuild connections; presumably appended to the connection URL —
// usage is outside this chunk, confirm against getRebuildIndexConnection.
public static final String REBUILD_INDEX_APPEND_TO_URL_STRING = "REBUILDINDEX";
// PHOENIX-5094 To differentiate the increment in PENDING_DISABLE_COUNT made by client or index
// rebuilder, we are using large value for index rebuilder
public static final long PENDING_DISABLE_INACTIVE_STATE_COUNT = 10000L;
// Row key of the SYSTEM.CATALOG header row; postOpen() uses it to ensure only the region
// hosting this key schedules the index rebuild task.
private static final byte[] SYSTEM_CATALOG_KEY = SchemaUtil.getTableKey(ByteUtil.EMPTY_BYTE_ARRAY,
  QueryConstants.SYSTEM_SCHEMA_NAME_BYTES, PhoenixDatabaseMetaData.SYSTEM_CATALOG_TABLE_BYTES);
// Runs the periodic BuildIndexScheduleTask; shut down in preClose().
protected ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(1);
// Daemon executor for the one-shot stats-truncation task scheduled in postOpen().
// (Field name keeps its historical typo; renaming is out of scope for a doc-only change.)
private ScheduledThreadPoolExecutor truncateTaskExectuor = new ScheduledThreadPoolExecutor(1,
  new ThreadFactoryBuilder().setDaemon(true).setNameFormat("task-truncated-%d").build());
// Defaults below are overwritten from configuration in start().
private boolean enableRebuildIndex = QueryServicesOptions.DEFAULT_INDEX_FAILURE_HANDLING_REBUILD;
private long rebuildIndexTimeInterval =
  QueryServicesOptions.DEFAULT_INDEX_FAILURE_HANDLING_REBUILD_INTERVAL;
// Per-table rebuild batch bookkeeping; usage is in code outside this chunk.
private static Map<PName, Long> batchExecutedPerTableMap = new HashMap<PName, Long>();
@GuardedBy("MetaDataRegionObserver.class")
private static Properties rebuildIndexConnectionProps;
// Added for test purposes
private long initialRebuildTaskDelay;
private long statsTruncateTaskDelay;
/** Stops the scheduled rebuild work and drops the cached metadata before the region closes. */
@Override
public void preClose(final ObserverContext<RegionCoprocessorEnvironment> c,
  boolean abortRequested) {
  executor.shutdownNow();
  final RegionCoprocessorEnvironment env = c.getEnvironment();
  GlobalCache.getInstance(env).getMetaDataCache().invalidateAll();
}
/** Exposes this coprocessor as a {@link RegionObserver}. */
@Override
public Optional<RegionObserver> getRegionObserver() {
  return Optional.of(this);
}
/**
 * Reads the rebuild/truncate settings from configuration, after an optional pause that
 * compensates for clock skew when SYSTEM.CATALOG moves among region servers (we rely on
 * the server time of the RS hosting SYSTEM.CATALOG).
 */
@Override
public void start(CoprocessorEnvironment env) throws IOException {
  final Configuration conf = env.getConfiguration();
  final long clockSkewSleepMs = conf.getLong(QueryServices.CLOCK_SKEW_INTERVAL_ATTRIB,
    QueryServicesOptions.DEFAULT_CLOCK_SKEW_INTERVAL);
  if (clockSkewSleepMs > 0) {
    try {
      Thread.sleep(clockSkewSleepMs);
    } catch (InterruptedException ie) {
      // Preserve the interrupt status and continue startup.
      Thread.currentThread().interrupt();
    }
  }
  enableRebuildIndex = conf.getBoolean(QueryServices.INDEX_FAILURE_HANDLING_REBUILD_ATTRIB,
    QueryServicesOptions.DEFAULT_INDEX_FAILURE_HANDLING_REBUILD);
  rebuildIndexTimeInterval =
    conf.getLong(QueryServices.INDEX_FAILURE_HANDLING_REBUILD_INTERVAL_ATTRIB,
      QueryServicesOptions.DEFAULT_INDEX_FAILURE_HANDLING_REBUILD_INTERVAL);
  initialRebuildTaskDelay = conf.getLong(QueryServices.INDEX_REBUILD_TASK_INITIAL_DELAY,
    QueryServicesOptions.DEFAULT_INDEX_REBUILD_TASK_INITIAL_DELAY);
  statsTruncateTaskDelay = conf.getLong(QueryServices.START_TRUNCATE_TASK_DELAY,
    QueryServicesOptions.DEFAULT_START_TRUNCATE_TASK_DELAY);
}
/**
 * After the region opens: schedules the one-shot 4.7-upgrade stats truncation (when stats
 * collection is enabled), and — only on the region hosting the SYSTEM.CATALOG key — schedules
 * the periodic failed-index rebuild task.
 */
@Override
public void postOpen(ObserverContext<RegionCoprocessorEnvironment> e) {
  final RegionCoprocessorEnvironment env = e.getEnvironment();
  Runnable r = new Runnable() {
    @Override
    public void run() {
      Table metaTable = null;
      Table statsTable = null;
      try {
        ReadOnlyProps props = new ReadOnlyProps(env.getConfiguration().iterator());
        Thread.sleep(1000);
        metaTable = env.getConnection().getTable(
          SchemaUtil.getPhysicalName(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES, props));
        statsTable = env.getConnection().getTable(
          SchemaUtil.getPhysicalName(PhoenixDatabaseMetaData.SYSTEM_STATS_NAME_BYTES, props));
        final Table mTable = metaTable;
        final Table sTable = statsTable;
        User.runAsLoginUser(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws Exception {
            if (UpgradeUtil.truncateStats(mTable, sTable)) {
              LOGGER.info("Stats are successfully truncated for upgrade 4.7!!");
            }
            return null;
          }
        });
      } catch (Exception exception) {
        // Fix: pass the throwable to the logger so the full stack trace is preserved.
        // The previous code logged exception.getStackTrace().toString(), which only
        // prints the array's identity hashcode and loses the trace entirely.
        LOGGER.warn("Exception while truncate stats..,"
          + " please check and delete stats manually inorder to get proper result with old client!!",
          exception);
      } finally {
        try {
          if (metaTable != null) {
            metaTable.close();
          }
          if (statsTable != null) {
            statsTable.close();
          }
        } catch (IOException ioe) {
          // Best-effort close; record instead of silently swallowing.
          LOGGER.debug("Error closing SYSTEM.CATALOG/SYSTEM.STATS table after stats truncate", ioe);
        }
      }
    }
  };
  if (
    env.getConfiguration().getBoolean(STATS_COLLECTION_ENABLED, DEFAULT_STATS_COLLECTION_ENABLED)
  ) {
    truncateTaskExectuor.schedule(r, statsTruncateTaskDelay, TimeUnit.MILLISECONDS);
  } else {
    LOGGER.info("Stats collection is disabled");
  }
  if (!enableRebuildIndex) {
    LOGGER.info("Failure Index Rebuild is skipped by configuration.");
    return;
  }
  // Ensure we only run one of the index rebuilder tasks
  if (ServerUtil.isKeyInRegion(SYSTEM_CATALOG_KEY, e.getEnvironment().getRegion())) {
    try {
      Class.forName(PhoenixDriver.class.getName());
      initRebuildIndexConnectionProps(e.getEnvironment().getConfiguration());
      // starts index rebuild schedule work
      BuildIndexScheduleTask task = new BuildIndexScheduleTask(e.getEnvironment());
      executor.scheduleWithFixedDelay(task, initialRebuildTaskDelay, rebuildIndexTimeInterval,
        TimeUnit.MILLISECONDS);
    } catch (ClassNotFoundException ex) {
      LOGGER.error("BuildIndexScheduleTask cannot start!", ex);
    }
  }
}
/**
* Task runs periodically to build indexes whose INDEX_NEED_PARTIALLY_REBUILD is set true
*/
public static class BuildIndexScheduleTask extends TimerTask {
// Region environment the task scans (the SYSTEM.CATALOG region that scheduled it).
RegionCoprocessorEnvironment env;
// Rebuild pacing knobs, read once from configuration in the constructor.
private final long rebuildIndexBatchSize;
private final long configuredBatches;
// How long (ms) an index may stay disabled before we give up and stop retrying its rebuild.
private final long indexDisableTimestampThreshold;
// How long (ms) an index may sit in PENDING_DISABLE before this task forces it to DISABLE.
private final long pendingDisableThreshold;
private final ReadOnlyProps props;
// Optional filter: only data tables named here are considered; null means all tables.
private final List<String> onlyTheseTables;
/** Builds a task with no table filter: every data table is considered for rebuild. */
public BuildIndexScheduleTask(RegionCoprocessorEnvironment env) {
  this(env, null);
}
/**
 * Builds a task restricted to the given data tables (null means no restriction), reading all
 * pacing and threshold settings from the environment's configuration.
 */
public BuildIndexScheduleTask(RegionCoprocessorEnvironment env, List<String> onlyTheseTables) {
  this.env = env;
  // Snapshot the filter so later mutation of the caller's list cannot affect this task.
  this.onlyTheseTables = onlyTheseTables == null ? null : ImmutableList.copyOf(onlyTheseTables);
  final Configuration conf = env.getConfiguration();
  this.rebuildIndexBatchSize =
    conf.getLong(QueryServices.INDEX_FAILURE_HANDLING_REBUILD_PERIOD, HConstants.LATEST_TIMESTAMP);
  this.configuredBatches =
    conf.getLong(QueryServices.INDEX_FAILURE_HANDLING_REBUILD_NUMBER_OF_BATCHES_PER_TABLE, 10);
  this.indexDisableTimestampThreshold =
    conf.getLong(QueryServices.INDEX_REBUILD_DISABLE_TIMESTAMP_THRESHOLD,
      QueryServicesOptions.DEFAULT_INDEX_REBUILD_DISABLE_TIMESTAMP_THRESHOLD);
  this.pendingDisableThreshold = conf.getLong(QueryServices.INDEX_PENDING_DISABLE_THRESHOLD,
    QueryServicesOptions.DEFAULT_INDEX_PENDING_DISABLE_THRESHOLD);
  this.props = new ReadOnlyProps(conf.iterator());
}
/**
 * Subtracts {@link #PENDING_DISABLE_INACTIVE_STATE_COUNT} from the PENDING_DISABLE counter of
 * each given index, undoing the large rebuilder-side increment. Failures are logged per index
 * and do not stop processing of the remaining indexes.
 * @param conn       connection used to update the counters
 * @param dataPTable data table owning the indexes (used only for log context)
 * @param indexes    indexes whose counters should be decremented
 * @return the indexes whose counter was successfully decremented
 */
public List<PTable> decrementIndexesPendingDisableCount(PhoenixConnection conn,
  PTable dataPTable, List<PTable> indexes) {
  // Renamed from "indexesIncremented": this method decrements the counter.
  List<PTable> decremented = new ArrayList<>();
  for (PTable index : indexes) {
    try {
      String indexName = index.getName().getString();
      IndexUtil.incrementCounterForIndex(conn, indexName, -PENDING_DISABLE_INACTIVE_STATE_COUNT);
      decremented.add(index);
    } catch (Exception e) {
      // Message fixed: it previously rendered as "index :<name>of table:<name>" (missing spaces).
      LOGGER.warn("Decrement of -" + PENDING_DISABLE_INACTIVE_STATE_COUNT + " for index: "
        + index.getName().getString() + " of table: " + dataPTable.getName().getString(), e);
    }
  }
  return decremented;
}
@Override
public void run() {
// FIXME: we should replay the data table Put, as doing a partial index build would only add
// the new rows and not delete the previous index value. Also, we should restrict the scan
// to only data within this region (as otherwise *every* region will be running this code
// separately, all updating the same data.
RegionScanner scanner = null;
PhoenixConnection conn = null;
try {
Scan scan = new Scan();
SingleColumnValueFilter filter =
new SingleColumnValueFilter(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP_BYTES, CompareOperator.NOT_EQUAL,
PLong.INSTANCE.toBytes(0L));
filter.setFilterIfMissing(true);
scan.setFilter(filter);
scan.addColumn(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
PhoenixDatabaseMetaData.TABLE_NAME_BYTES);
scan.addColumn(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
PhoenixDatabaseMetaData.DATA_TABLE_NAME_BYTES);
scan.addColumn(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
PhoenixDatabaseMetaData.INDEX_STATE_BYTES);
scan.addColumn(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP_BYTES);
Map<PTable, List<Pair<PTable, Long>>> dataTableToIndexesMap = null;
boolean hasMore = false;
List<Cell> results = new ArrayList<Cell>();
scanner = this.env.getRegion().getScanner(scan);
do {
results.clear();
hasMore = scanner.next(results);
if (results.isEmpty()) {
LOGGER.debug("Found no indexes with non zero INDEX_DISABLE_TIMESTAMP");
break;
}
Result r = Result.create(results);
byte[] disabledTimeStamp = r.getValue(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP_BYTES);
Cell indexStateCell = r.getColumnLatestCell(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
PhoenixDatabaseMetaData.INDEX_STATE_BYTES);
if (disabledTimeStamp == null || disabledTimeStamp.length == 0) {
LOGGER.debug("Null or empty INDEX_DISABLE_TIMESTAMP");
continue;
}
byte[] dataTable = r.getValue(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
PhoenixDatabaseMetaData.DATA_TABLE_NAME_BYTES);
if ((dataTable == null || dataTable.length == 0) || indexStateCell == null) {
// data table name can't be empty
LOGGER.debug("Null or data table name or index state");
continue;
}
byte[] indexStateBytes = CellUtil.cloneValue(indexStateCell);
byte[][] rowKeyMetaData = new byte[3][];
SchemaUtil.getVarChars(r.getRow(), 3, rowKeyMetaData);
byte[] schemaName = rowKeyMetaData[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
byte[] indexTable = rowKeyMetaData[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
// validity check
if (indexTable == null || indexTable.length == 0) {
LOGGER.debug("We find IndexTable empty during rebuild scan:" + scan
+ "so, Index rebuild has been skipped for row=" + r);
continue;
}
String dataTableFullName = SchemaUtil.getTableName(schemaName, dataTable);
if (onlyTheseTables != null && !onlyTheseTables.contains(dataTableFullName)) {
LOGGER.debug("Could not find " + dataTableFullName + " in " + onlyTheseTables);
continue;
}
if (conn == null) {
conn = getRebuildIndexConnection(env.getConfiguration());
dataTableToIndexesMap = Maps.newHashMap();
}
PTable dataPTable = conn.getTableNoCache(dataTableFullName);
String indexTableFullName = SchemaUtil.getTableName(schemaName, indexTable);
PTable indexPTable = conn.getTableNoCache(indexTableFullName);
// Sanity check in case index was removed from table
if (!dataPTable.getIndexes().contains(indexPTable)) {
LOGGER
.debug(dataTableFullName + " does not contain " + indexPTable.getName().getString());
continue;
}
PIndexState indexState = PIndexState.fromSerializedValue(indexStateBytes[0]);
long pendingDisableCountLastUpdatedTs =
IndexUtil.getIndexPendingDisableCountLastUpdatedTimestamp(conn, indexTableFullName);
long elapsedSinceDisable =
EnvironmentEdgeManager.currentTimeMillis() - pendingDisableCountLastUpdatedTs;
// on an index write failure, the server side transitions to PENDING_DISABLE, then the
// client
// retries, and after retries are exhausted, disables the index
if (indexState == PIndexState.PENDING_DISABLE) {
if (elapsedSinceDisable > pendingDisableThreshold) {
// too long in PENDING_DISABLE -
// client didn't disable the index because last time when
// PENDING_DISABLE_COUNT was updated is greater than pendingDisableThreshold,
// so we do it here
IndexUtil.updateIndexState(conn, indexTableFullName, PIndexState.DISABLE,
pendingDisableCountLastUpdatedTs);
}
continue;
}
// Only perform relatively expensive check for all regions online when index
// is disabled or pending active since that's the state it's placed into when
// an index write fails.
if (
(indexState.isDisabled() || indexState == PIndexState.PENDING_ACTIVE)
&& !tableRegionsOnline(this.env.getConfiguration(), indexPTable)
) {
LOGGER.debug("Index rebuild has been skipped because not all regions of"
+ " index table=" + indexPTable.getName() + " are online.");
continue;
}
if (elapsedSinceDisable > indexDisableTimestampThreshold) {
/*
* It has been too long since the index has been disabled and any future attempts to
* reenable it likely will fail. So we are going to mark the index as disabled and set
* the index disable timestamp to 0 so that the rebuild task won't pick up this index
* again for rebuild.
*/
try {
IndexUtil.updateIndexState(conn, indexTableFullName, PIndexState.DISABLE, 0l);
LOGGER.error("Unable to rebuild index " + indexTableFullName
+ ". Won't attempt again since index disable timestamp is"
+ " older than current time by " + indexDisableTimestampThreshold
+ " milliseconds. Manual intervention needed to re-build" + " the index");
} catch (Throwable ex) {
LOGGER.error("Unable to mark index " + indexTableFullName + " as disabled.", ex);
}
continue; // don't attempt another rebuild irrespective of whether
// updateIndexState worked or not
}
// Allow index to begin incremental maintenance as index is back online and we
// cannot transition directly from DISABLED -> ACTIVE
if (indexState == PIndexState.DISABLE) {
if (
IndexUtil.getIndexPendingDisableCount(conn, indexTableFullName)
< PENDING_DISABLE_INACTIVE_STATE_COUNT
) {
// to avoid incrementing again
IndexUtil.incrementCounterForIndex(conn, indexTableFullName,
PENDING_DISABLE_INACTIVE_STATE_COUNT);
}
IndexUtil.updateIndexState(conn, indexTableFullName, PIndexState.INACTIVE, null);
continue; // Must wait until clients start to do index maintenance again
} else if (indexState == PIndexState.PENDING_ACTIVE) {
IndexUtil.updateIndexState(conn, indexTableFullName, PIndexState.ACTIVE, null);
continue; // Must wait until clients start to do index maintenance again
} else if (indexState != PIndexState.INACTIVE && indexState != PIndexState.ACTIVE) {
LOGGER.warn("Unexpected index state of " + indexTableFullName + "=" + indexState
+ ". Skipping partial rebuild attempt.");
continue;
}
long currentTime = EnvironmentEdgeManager.currentTimeMillis();
long forwardOverlapDurationMs = env.getConfiguration().getLong(
QueryServices.INDEX_FAILURE_HANDLING_REBUILD_OVERLAP_FORWARD_TIME_ATTRIB,
QueryServicesOptions.DEFAULT_INDEX_FAILURE_HANDLING_REBUILD_OVERLAP_FORWARD_TIME);
// Wait until no failures have occurred in at least forwardOverlapDurationMs
if (indexStateCell.getTimestamp() + forwardOverlapDurationMs > currentTime) {
LOGGER.debug("Still must wait "
+ (indexStateCell.getTimestamp() + forwardOverlapDurationMs - currentTime)
+ " before starting rebuild for " + indexTableFullName);
continue; // Haven't waited long enough yet
}
Long upperBoundOfRebuild = indexStateCell.getTimestamp() + forwardOverlapDurationMs;
// Pass in upperBoundOfRebuild when setting index state or increasing disable ts
// and fail if index timestamp > upperBoundOfRebuild.
List<Pair<PTable, Long>> indexesToPartiallyRebuild =
dataTableToIndexesMap.get(dataPTable);
if (indexesToPartiallyRebuild == null) {
indexesToPartiallyRebuild =
Lists.newArrayListWithExpectedSize(dataPTable.getIndexes().size());
dataTableToIndexesMap.put(dataPTable, indexesToPartiallyRebuild);
}
LOGGER.debug("We have found " + indexPTable.getIndexState() + " Index:"
+ indexPTable.getName() + " on data table:" + dataPTable.getName()
+ " which failed to be updated at " + indexPTable.getIndexDisableTimestamp());
indexesToPartiallyRebuild.add(new Pair<PTable, Long>(indexPTable, upperBoundOfRebuild));
} while (hasMore);
if (dataTableToIndexesMap != null) {
long backwardOverlapDurationMs = env.getConfiguration().getLong(
QueryServices.INDEX_FAILURE_HANDLING_REBUILD_OVERLAP_BACKWARD_TIME_ATTRIB,
env.getConfiguration().getLong(
QueryServices.INDEX_FAILURE_HANDLING_REBUILD_OVERLAP_TIME_ATTRIB,
QueryServicesOptions.DEFAULT_INDEX_FAILURE_HANDLING_REBUILD_OVERLAP_BACKWARD_TIME));
for (Map.Entry<PTable, List<Pair<PTable, Long>>> entry : dataTableToIndexesMap
.entrySet()) {
PTable dataPTable = entry.getKey();
List<Pair<PTable, Long>> pairs = entry.getValue();
List<PTable> indexesToPartiallyRebuild =
Lists.newArrayListWithExpectedSize(pairs.size());
try (Table metaTable = env.getConnection().getTable(SchemaUtil
.getPhysicalName(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES, props))) {
long earliestDisableTimestamp = Long.MAX_VALUE;
long latestUpperBoundTimestamp = Long.MIN_VALUE;
List<IndexMaintainer> maintainers = Lists.newArrayListWithExpectedSize(pairs.size());
int signOfDisableTimeStamp = 0;
for (Pair<PTable, Long> pair : pairs) {
// We need a way of differentiating the block writes to data table case from
// the leave index active case. In either case, we need to know the time stamp
// at which writes started failing so we can rebuild from that point. If we
// keep the index active *and* have a positive INDEX_DISABLE_TIMESTAMP_BYTES,
// then writes to the data table will be blocked (this is client side logic
// and we can't change this in a minor release). So we use the sign of the
// time stamp to differentiate.
PTable index = pair.getFirst();
Long upperBoundTimestamp = pair.getSecond();
long disabledTimeStampVal = index.getIndexDisableTimestamp();
if (disabledTimeStampVal != 0) {
if (
signOfDisableTimeStamp != 0
&& signOfDisableTimeStamp != Long.signum(disabledTimeStampVal)
) {
LOGGER
.warn("Found unexpected mix of signs with " + "INDEX_DISABLE_TIMESTAMP for "
+ dataPTable.getName().getString() + " with " + indexesToPartiallyRebuild);
}
signOfDisableTimeStamp = Long.signum(disabledTimeStampVal);
disabledTimeStampVal = Math.abs(disabledTimeStampVal);
if (disabledTimeStampVal < earliestDisableTimestamp) {
earliestDisableTimestamp = disabledTimeStampVal;
}
indexesToPartiallyRebuild.add(index);
maintainers.add(index.getIndexMaintainer(dataPTable, conn));
}
if (upperBoundTimestamp > latestUpperBoundTimestamp) {
latestUpperBoundTimestamp = upperBoundTimestamp;
}
}
// No indexes are disabled, so skip this table
if (earliestDisableTimestamp == Long.MAX_VALUE) {
LOGGER.debug("No indexes are disabled so continuing");
continue;
}
long scanBeginTime =
Math.max(0, earliestDisableTimestamp - backwardOverlapDurationMs);
long scanEndTime =
Math.min(latestUpperBoundTimestamp, getTimestampForBatch(scanBeginTime,
batchExecutedPerTableMap.get(dataPTable.getName())));
LOGGER
.info("Starting to build " + dataPTable + " indexes " + indexesToPartiallyRebuild
+ " from timestamp=" + scanBeginTime + " until " + scanEndTime);
TableRef tableRef =
new TableRef(null, dataPTable, HConstants.LATEST_TIMESTAMP, false);
// TODO Need to set high timeout
PostDDLCompiler compiler = new PostDDLCompiler(conn);
MutationPlan plan = compiler.compile(Collections.singletonList(tableRef), null, null,
null, scanEndTime);
Scan dataTableScan =
IndexManagementUtil.newLocalStateScan(plan.getContext().getScan(), maintainers);
dataTableScan.setTimeRange(scanBeginTime, scanEndTime);
dataTableScan.setCacheBlocks(false);
dataTableScan.setAttribute(BaseScannerRegionObserverConstants.REBUILD_INDEXES,
TRUE_BYTES);
ImmutableBytesWritable indexMetaDataPtr =
new ImmutableBytesWritable(ByteUtil.EMPTY_BYTE_ARRAY);
IndexMaintainer.serializeAdditional(dataPTable, indexMetaDataPtr,
indexesToPartiallyRebuild, conn);
byte[] attribValue = ByteUtil.copyKeyBytesIfNecessary(indexMetaDataPtr);
// TODO : use array of index names as Scan attribute for only
// specific index maintainer lookup at the server side.
// ScanUtil.setWALAnnotationAttributes(dataPTable, dataTableScan);
dataTableScan.setAttribute(PhoenixIndexCodec.INDEX_PROTO_MD, attribValue);
ScanUtil.setClientVersion(dataTableScan, MetaDataProtocol.PHOENIX_VERSION);
LOGGER.info("Starting to partially build indexes:" + indexesToPartiallyRebuild
+ " on data table:" + dataPTable.getName() + " with the earliest disable timestamp:"
+ earliestDisableTimestamp + " till "
+ (scanEndTime == HConstants.LATEST_TIMESTAMP ? "LATEST_TIMESTAMP" : scanEndTime));
MutationState mutationState = plan.execute();
long rowCount = mutationState.getUpdateCount();
decrementIndexesPendingDisableCount(conn, dataPTable, indexesToPartiallyRebuild);
if (scanEndTime == latestUpperBoundTimestamp) {
LOGGER.info("Rebuild completed for all inactive/disabled indexes in data table:"
+ dataPTable.getName());
}
LOGGER.info(" no. of datatable rows read in rebuilding process is " + rowCount);
for (PTable indexPTable : indexesToPartiallyRebuild) {
String indexTableFullName = SchemaUtil.getTableName(
indexPTable.getSchemaName().getString(), indexPTable.getTableName().getString());
try {
if (scanEndTime == latestUpperBoundTimestamp) {
IndexUtil.updateIndexState(conn, indexTableFullName, PIndexState.ACTIVE, 0L,
latestUpperBoundTimestamp);
batchExecutedPerTableMap.remove(dataPTable.getName());
LOGGER.info(
"Making Index:" + indexPTable.getTableName() + " active after rebuilding");
} else {
// Increment timestamp so that client sees updated disable timestamp
IndexUtil.updateIndexState(conn, indexTableFullName,
indexPTable.getIndexState(), scanEndTime * signOfDisableTimeStamp,
latestUpperBoundTimestamp);
Long noOfBatches = batchExecutedPerTableMap.get(dataPTable.getName());
if (noOfBatches == null) {
noOfBatches = 0l;
}
batchExecutedPerTableMap.put(dataPTable.getName(), ++noOfBatches);
LOGGER.info(
"During Round-robin build: Successfully updated index disabled timestamp for "
+ indexTableFullName + " to " + scanEndTime);
}
} catch (SQLException e) {
LOGGER.error("Unable to rebuild " + dataPTable + " index " + indexTableFullName,
e);
}
}
} catch (Exception e) {
LOGGER.error(
"Unable to rebuild " + dataPTable + " indexes " + indexesToPartiallyRebuild, e);
}
}
}
} catch (Throwable t) {
LOGGER.warn("ScheduledBuildIndexTask failed!", t);
} finally {
if (scanner != null) {
try {
scanner.close();
} catch (IOException ignored) {
LOGGER.debug("ScheduledBuildIndexTask can't close scanner.", ignored);
}
}
if (conn != null) {
try {
conn.close();
} catch (SQLException ignored) {
LOGGER.debug("ScheduledBuildIndexTask can't close connection", ignored);
}
}
}
}
/**
 * Computes the upper timestamp bound for the next incremental rebuild batch.
 * Returns LATEST_TIMESTAMP when the whole remaining range should be rebuilt
 * in one pass.
 */
private long getTimestampForBatch(long disabledTimeStamp, Long noOfBatches) {
  // A negative disable timestamp, or a batch size so large that adding it
  // would pass LATEST_TIMESTAMP, means: rebuild everything up to the latest.
  boolean overflowsLatest =
      rebuildIndexBatchSize > (HConstants.LATEST_TIMESTAMP - disabledTimeStamp);
  if (disabledTimeStamp < 0 || overflowsLatest) {
    return HConstants.LATEST_TIMESTAMP;
  }
  long nextBatchTs = disabledTimeStamp + rebuildIndexBatchSize;
  boolean batchLimitReached = noOfBatches != null && noOfBatches > configuredBatches;
  // If the batch boundary wrapped, crossed the current wall clock, or we have
  // already run the configured number of batches, build the complete index.
  if (nextBatchTs < 0
      || nextBatchTs > EnvironmentEdgeManager.currentTimeMillis()
      || batchLimitReached) {
    return HConstants.LATEST_TIMESTAMP;
  }
  return nextBatchTs;
}
}
@VisibleForTesting
public static synchronized void initRebuildIndexConnectionProps(Configuration config) {
  // Lazily initialized exactly once; subsequent calls are no-ops.
  if (rebuildIndexConnectionProps != null) {
    return;
  }
  long queryTimeoutMs = config.getLong(QueryServices.INDEX_REBUILD_QUERY_TIMEOUT_ATTRIB,
      QueryServicesOptions.DEFAULT_INDEX_REBUILD_QUERY_TIMEOUT);
  long rpcTimeoutMs = config.getLong(QueryServices.INDEX_REBUILD_RPC_TIMEOUT_ATTRIB,
      QueryServicesOptions.DEFAULT_INDEX_REBUILD_RPC_TIMEOUT);
  long scannerTimeoutMs =
      config.getLong(QueryServices.INDEX_REBUILD_CLIENT_SCANNER_TIMEOUT_ATTRIB,
          QueryServicesOptions.DEFAULT_INDEX_REBUILD_CLIENT_SCANNER_TIMEOUT);
  int rpcRetries = config.getInt(QueryServices.INDEX_REBUILD_RPC_RETRIES_COUNTER,
      QueryServicesOptions.DEFAULT_INDEX_REBUILD_RPC_RETRIES_COUNTER);
  Properties props = new Properties();
  // Phoenix- and HBase-level timeouts and retry counts used for rebuild work.
  props.setProperty(QueryServices.THREAD_TIMEOUT_MS_ATTRIB,
      Long.toString(queryTimeoutMs));
  props.setProperty(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD,
      Long.toString(scannerTimeoutMs));
  props.setProperty(HConstants.HBASE_RPC_TIMEOUT_KEY, Long.toString(rpcTimeoutMs));
  props.setProperty(HConstants.HBASE_CLIENT_RETRIES_NUMBER,
      Long.toString(rpcRetries));
  // don't run a second index populations upsert select
  props.setProperty(QueryServices.INDEX_POPULATION_SLEEP_TIME, "0");
  rebuildIndexConnectionProps = PropertiesUtil.combineProperties(props, config);
}
/**
 * Returns a server-side Phoenix connection configured with the rebuild-specific
 * timeouts and retry settings (see {@link #initRebuildIndexConnectionProps}).
 *
 * @param config configuration used to derive the connection properties
 * @return a PhoenixConnection whose JDBC URL is tagged for index-rebuild use
 * @throws SQLException if the connection cannot be established
 */
public static PhoenixConnection getRebuildIndexConnection(Configuration config)
  throws SQLException {
  initRebuildIndexConnectionProps(config);
  // The custom URL suffix distinguishes rebuild connections from regular
  // server-side connections.
  return QueryUtil.getConnectionOnServerWithCustomUrl(rebuildIndexConnectionProps,
    REBUILD_INDEX_APPEND_TO_URL_STRING).unwrap(PhoenixConnection.class);
}
/**
 * Checks whether every region server hosting a region of the given table is
 * reachable. Returns false on the first region whose server cannot be contacted.
 *
 * @param conf HBase configuration used to open the cluster connection
 * @param table the table whose regions to probe
 * @return true if all hosting region servers responded, false otherwise
 */
public static boolean tableRegionsOnline(Configuration conf, PTable table) {
  // Admin is Closeable; include it in try-with-resources so it is not leaked.
  try (Connection hcon = ConnectionFactory.createConnection(conf);
      Admin admin = hcon.getAdmin()) {
    List<RegionInfo> regionInfos =
        admin.getRegions(TableName.valueOf(table.getPhysicalName().getBytes()));
    // This makes Number of Regions RPC calls sequentially.
    // For large tables this can be slow.
    for (RegionInfo regionInfo : regionInfos) {
      try {
        // We don't actually care about the compaction state; the call is only
        // used because it triggers a call to the RS (from master), verifying
        // that the hosting region server is alive. There are only a few
        // methods in HBase 3.0 that directly call the RS; this is one of them.
        admin.getCompactionStateForRegion(regionInfo.getRegionName());
      } catch (IOException e) {
        // Pass the exception as the final argument so the stack trace is kept.
        LOGGER.debug("Cannot get region {} info due to error",
            regionInfo.getEncodedName(), e);
        return false;
      }
    }
  } catch (IOException ex) {
    LOGGER.warn("tableRegionsOnline failed due to:", ex);
    return false;
  }
  return true;
}
}
|
apache/sentry | 36,098 | sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/HMSPaths.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sentry.hdfs;
import java.util.*;
import com.google.common.base.Joiner;
import org.apache.hadoop.fs.Path;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A non thread-safe implementation of {@link AuthzPaths}. It abstracts over the
* core data-structures used to efficiently handle request from clients of
* the {@link AuthzPaths} paths. All updates to this class is handled by the
* thread safe {@link UpdateableAuthzPaths} class
*/
public class HMSPaths implements AuthzPaths {
private static final Logger LOG = LoggerFactory.getLogger(HMSPaths.class);
/**
 * Splits an absolute path into its non-empty path elements; e.g. "/a//b/c"
 * yields ["a", "b", "c"].
 *
 * @param path an absolute path; surrounding whitespace is trimmed first
 * @return the list of path elements, in order
 * @throws IllegalArgumentException if the trimmed path is empty or not absolute
 */
@VisibleForTesting
static List<String> getPathElements(String path) {
  String trimmedPath = path.trim();
  // Guard the empty string explicitly: charAt(0) on "" would otherwise throw
  // StringIndexOutOfBoundsException instead of a meaningful error.
  if (trimmedPath.isEmpty() || trimmedPath.charAt(0) != Path.SEPARATOR_CHAR) {
    throw new IllegalArgumentException("It must be an absolute path: " +
        trimmedPath);
  }
  List<String> list = new ArrayList<String>(32);
  int idx = 0;
  int found = trimmedPath.indexOf(Path.SEPARATOR_CHAR, idx);
  // Walk separator to separator; consecutive separators produce no element.
  while (found > -1) {
    if (found > idx) {
      list.add(trimmedPath.substring(idx, found));
    }
    idx = found + 1;
    found = trimmedPath.indexOf(Path.SEPARATOR_CHAR, idx);
  }
  // Trailing segment after the last separator (absent for paths ending in '/').
  if (idx < trimmedPath.length()) {
    list.add(trimmedPath.substring(idx));
  }
  return list;
}
/** Converts each path in {@code paths} into its list of path elements. */
@VisibleForTesting
static List<List<String>> getPathsElements(List<String> paths) {
  List<List<String>> elementLists = new ArrayList<List<String>>(paths.size());
  for (int i = 0; i < paths.size(); i++) {
    elementLists.add(getPathElements(paths.get(i)));
  }
  return elementLists;
}
/**
 * Reassembles split path elements back into "/a/b/c"-style path strings;
 * used for more compact logging.
 *
 * @param pathElements paths, each given as its list of elements; may be null
 * @return the assembled absolute paths, or an empty list for null input
 */
static List<String> assemblePaths(List<List<String>> pathElements) {
  if (pathElements == null) {
    return Collections.emptyList();
  }
  List<String> paths = new ArrayList<>(pathElements.size());
  for (List<String> path : pathElements) {
    // StringBuilder over StringBuffer: the builder is method-local, so the
    // synchronization StringBuffer provides is pure overhead.
    StringBuilder sb = new StringBuilder();
    for (String elem : path) {
      sb.append(Path.SEPARATOR_CHAR).append(elem);
    }
    paths.add(sb.toString());
  }
  return paths;
}
@VisibleForTesting
// Classifies a node in the HMSPaths tree: a plain directory, a configured
// prefix root, or a path bound to one or more authorizable objects.
enum EntryType {
  DIR(true),
  PREFIX(false),
  AUTHZ_OBJECT(false);

  // Whether a childless node of this type should be pruned from the tree.
  private final boolean removeIfDangling;

  EntryType(boolean removeIfDangling) {
    this.removeIfDangling = removeIfDangling;
  }

  public boolean isRemoveIfDangling() {
    return removeIfDangling;
  }

  // Single-byte tag used when serializing the type: 'D', 'P' or 'A'.
  public byte getByte() {
    return (byte) name().charAt(0);
  }

  // Inverse of getByte(); returns null for an unknown tag byte.
  public static EntryType fromByte(byte b) {
    if (b == (byte) 'D') {
      return DIR;
    }
    if (b == (byte) 'P') {
      return PREFIX;
    }
    if (b == (byte) 'A') {
      return AUTHZ_OBJECT;
    }
    return null;
  }
}
/**
* Entry represents a node in the tree that {@see HMSPaths} uses to organize the auth objects.
* This tree maps the entries in the filesystem namespace in HDFS, and the auth objects are
* associated to each entry.
*
* Each individual entry in the tree contains a children map that maps the path element
* (filename) to the child entry.
*
* For example, for a HDFS file or directory, "hdfs://foo/bar", it is presented in HMSPaths as the
* following tree, of which the root node is {@link HMSPaths#root}.
*
* Entry("/", children: {
* "foo": Entry("foo", children: {
* "bar": Entry("bar"),
* "zoo": Entry("zoo"),
* }),
* "tmp": Entry("tmp"),
* ...
* });
*
* Note that the URI scheme is not presented in the tree.
*/
@VisibleForTesting
static class Entry {
private Entry parent;
private EntryType type;
private String pathElement;
// A set (or single object when set size is 1) of authorizable objects associated
// with this entry. Authorizable object should be case insensitive. The set is
// allocated lazily to avoid wasting memory due to empty sets.
private Object authzObjs;
// Path of child element to the path entry mapping, e.g. 'b' -> '/a/b'
// This is allocated lazily to avoid wasting memory due to empty maps.
private Map<String, Entry> children;
/**
* Construct an Entry with one authzObj.
*
* @param parent the parent node. If not specified, this entry is a root entry.
* @param pathElement the path element of this entry on the tree.
* @param type Entry type.
* @param authzObj the authzObj.
*/
Entry(Entry parent, String pathElement, EntryType type, String authzObj) {
this.parent = parent;
this.type = type;
this.pathElement = pathElement.intern();
addAuthzObj(authzObj);
}
/**
* Construct an Entry with a set of authz objects.
* @param parent the parent node. If not specified, this entry is a root entry.
* @param pathElement the path element of this entry on the tree.
* @param type entry type.
* @param authzObjs a collection of authz objects.
*/
Entry(Entry parent, String pathElement, EntryType type, Collection<String> authzObjs) {
this.parent = parent;
this.type = type;
this.pathElement = pathElement.intern();
addAuthzObjs(authzObjs);
}
Entry getChild(String pathElement) {
if (children == null) {
return null;
}
return children.get(pathElement);
}
void putChild(String pathElement, Entry entry) {
if (children == null) {
// We allocate this map lazily and with small initial capacity to avoid
// memory waste due to empty and underpopulated maps.
children = new HashMap<>(2);
}
if (LOG.isDebugEnabled()) {
LOG.debug("[putChild]Adding {} as child to {}", entry.toString(), this.toString());
}
children.put(pathElement.intern(), entry);
}
Entry removeChild(String pathElement) {
if (LOG.isDebugEnabled()) {
LOG.debug("[removeChild]Removing {} from children", pathElement);
}
return children.remove(pathElement);
}
boolean hasChildren() { return children != null && !children.isEmpty(); }
int numChildren() { return children == null ? 0 : children.size(); }
Collection<Entry> childrenValues() {
return children != null ? children.values() : Collections.<Entry>emptyList();
}
void clearAuthzObjs() {
if (LOG.isDebugEnabled()) {
LOG.debug("Clearing authzObjs from {}", this.toString());
}
authzObjs = null;
}
/**
 * Dissociates the given authorizable object from this entry.
 *
 * {@code authzObjs} stores either a single String or a Set of Strings (a
 * memory optimization for the common one-object case). When a removal
 * shrinks the set to exactly one element, the field collapses back to a
 * plain String; when the single stored String matches, the field is nulled.
 */
void removeAuthzObj(String authzObj) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Removing {} from {}", authzObj, authzObjs);
  }
  if (authzObjs != null) {
    if (authzObjs instanceof Set) {
      Set<String> authzObjsSet = (Set<String>) authzObjs;
      authzObjsSet.remove(authzObj);
      // Collapse a one-element set back to a bare String to save memory.
      if (authzObjsSet.size() == 1) {
        authzObjs = authzObjsSet.iterator().next();
      }
    } else if (authzObjs.equals(authzObj)){
      authzObjs = null;
    }
  }
}
/**
 * Associates an authorizable object with this entry. Null input is a no-op.
 *
 * {@code authzObjs} holds a single String while there is one object and is
 * promoted to a (case-insensitive, per newTreeSetWithElement) Set only when a
 * second distinct object arrives. Strings are interned to reduce memory use.
 */
void addAuthzObj(String authzObj) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Adding {} to {}", authzObj, this.toString());
  }
  if (authzObj != null) {
    if (authzObjs == null) {
      // First object: store the bare String, no set allocation.
      authzObjs = authzObj;
    } else {
      Set<String> authzObjsSet;
      if (authzObjs instanceof String) {
        if (authzObjs.equals(authzObj)) {
          // Already present; nothing to do.
          return;
        } else {
          // Promote the single String to a set holding both objects.
          authzObjs = authzObjsSet = newTreeSetWithElement((String) authzObjs);
        }
      } else {
        authzObjsSet = (Set) authzObjs;
      }
      authzObjsSet.add(authzObj.intern());
    }
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Added {} to {}", authzObj, this.toString());
  }
}
void addAuthzObjs(Collection<String> authzObjs) {
if (LOG.isDebugEnabled()) {
LOG.debug("Adding {} to {}", authzObjs, this.toString());
}
if (authzObjs != null) {
for (String authzObj : authzObjs) {
addAuthzObj(authzObj.intern());
}
}
}
private void setType(EntryType type) {
this.type = type;
}
protected void removeParent() {
parent = null;
}
@Override
public String toString() {
return String.format("Entry[%s:%s -> authObj: %s]",
type, getFullPath(), authzObjsToString());
}
private String authzObjsToString() {
if (authzObjs == null) {
return "";
} else if (authzObjs instanceof String) {
return (String) authzObjs;
} else {
return Joiner.on(",").join((Set) authzObjs);
}
}
/**
 * Structural equality over parent, type, path element and authz objects.
 * Note that comparing {@code parent} recurses up to the root.
 */
@Override
public boolean equals(Object obj) {
  if (this == obj) {
    return true;
  }
  if (obj == null || getClass() != obj.getClass()) {
    return false;
  }
  Entry other = (Entry) obj;
  // Objects.equals collapses the explicit null-vs-null / null-vs-value
  // branches of the original field-by-field comparison.
  return Objects.equals(parent, other.parent)
      && Objects.equals(type, other.type)
      && Objects.equals(pathElement, other.pathElement)
      && Objects.equals(authzObjs, other.authzObjs);
}
/** Hash over the same fields as {@link #equals}, keeping the contract. */
@Override
public int hashCode() {
  // Objects.hash performs the identical 31-based accumulation (null -> 0)
  // over the same fields in the same order, so the value is unchanged.
  return Objects.hash(parent, type, pathElement, authzObjs);
}
/**
* Create all missing parent entries for an given entry, and return the parent entry for
* the entry.
*
* For example, if {@code pathElements} is ["a", "b", "c"], it creates entries "/a" and "/a/b"
* if they do not exist, and then returns "/a/b" as the parent entry.
*
* @param pathElements path elements of the entry.
* @return the direct parent entry of the given entry.
*/
private Entry createParent(List<String> pathElements) {
Entry parent = this;
if (LOG.isDebugEnabled()) {
LOG.debug("[createParent]Trying to create entires for {} ", pathElements);
}
// The loop is resilient to 0 or 1 element list.
for (int i = 0; i < pathElements.size() - 1; i++) {
String elem = pathElements.get(i);
Entry child = parent.getChild(elem);
if (child == null) {
child = new Entry(parent, elem, EntryType.DIR, (String) null);
parent.putChild(elem, child);
if (LOG.isDebugEnabled()) {
LOG.debug("[createParent] Entry {} created", child.toString());
}
}
parent = child;
}
return parent;
}
/**
 * Create a child entry based on the path, type and authzObj that
 * associates with it.
 *
 * @param pathElements a path split into segments.
 * @param type the type of the child entry.
 * @param authzObj the authorizable Object associates with the entry.
 * @return Returns the child entry.
 */
private Entry createChild(List<String> pathElements, EntryType type,
    String authzObj) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("[createChild] Creating child for {} with path {}", authzObj, pathElements);
  }
  // Create all the parent entries on the path if they do not exist.
  Entry entryParent = createParent(pathElements);
  String lastPathElement = pathElements.get(pathElements.size() - 1);
  Entry child = entryParent.getChild(lastPathElement);
  // Create the child entry if not found. If found and the entry is
  // already a prefix or authzObj type, then only add the authzObj.
  // If the entry already existed as dir, we change it to be a authzObj,
  // and add the authzObj.
  if (child == null) {
    child = new Entry(entryParent, lastPathElement, type, authzObj);
    entryParent.putChild(lastPathElement, child);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Created child entry {}", child);
    }
  } else if (type == EntryType.AUTHZ_OBJECT &&
      (child.getType() == EntryType.PREFIX || child.getType() == EntryType.AUTHZ_OBJECT)) {
    child.addAuthzObj(authzObj);
    if (LOG.isDebugEnabled()) {
      LOG.debug("[createChild] Found Child {}, updated authzObj", child.toString());
    }
  } else if (type == EntryType.AUTHZ_OBJECT &&
      child.getType() == EntryType.DIR) {
    child.addAuthzObj(authzObj);
    child.setType(EntryType.AUTHZ_OBJECT);
    if (LOG.isDebugEnabled()) {
      LOG.debug("[createChild] Found Child {}, updated authzObj", child.toString());
      // Fixed: the message previously had no {} placeholder, so the new type
      // argument was silently dropped by SLF4J.
      LOG.debug("[createChild] Updating type to {}", child.getType().toString());
    }
  }
  return child;
}
/** Creates the tree root ("/"), as a PREFIX entry when "/" itself is managed. */
public static Entry createRoot(boolean asPrefix) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Creating entry for root");
  }
  EntryType rootType = asPrefix ? EntryType.PREFIX : EntryType.DIR;
  return new Entry(null, "/", rootType, (String) null);
}
private String toPath(List<String> arr) {
StringBuilder sb = new StringBuilder();
for (String s : arr) {
sb.append(Path.SEPARATOR).append(s);
}
return sb.toString();
}
/**
 * Marks the given path as a managed prefix, creating any missing intermediate
 * entries. Prefixes may not be nested.
 *
 * @param pathElements the prefix path split into elements
 * @return the newly created PREFIX entry
 * @throws IllegalArgumentException if the path is already under an existing prefix
 */
public Entry createPrefix(List<String> pathElements) {
  if (LOG.isDebugEnabled()) {
    // Fixed: the message previously had no {} placeholder, so the path
    // argument was silently dropped by SLF4J.
    LOG.debug("Creating entries for prefix paths {}", pathElements.toString());
  }
  Entry prefix = findPrefixEntry(pathElements);
  if (prefix != null) {
    throw new IllegalArgumentException(String.format(
        "%s: createPrefix(%s): cannot add prefix under an existing prefix '%s'",
        this, pathElements, prefix.getFullPath()));
  }
  return createChild(pathElements, EntryType.PREFIX, null);
}
public Entry createAuthzObjPath(List<String> pathElements, String authzObj) {
Entry entry = null;
if (LOG.isDebugEnabled()) {
LOG.debug("createAuthzObjPath authzObj:{} paths: {}", authzObj, pathElements);
}
Entry prefix = findPrefixEntry(pathElements);
if (prefix != null) {
// we only create the entry if is under a prefix, else we ignore it
entry = createChild(pathElements, EntryType.AUTHZ_OBJECT, authzObj);
} else {
if (LOG.isDebugEnabled()) {
LOG.debug(String.format("%s: createAuthzObjPath(%s, %s): outside of prefix, skipping",
this, authzObj, pathElements));
}
}
return entry;
}
/**
* Delete this entry from its parent.
*/
private void deleteFromParent() {
if (LOG.isDebugEnabled()) {
LOG.debug("[deleteFromParent] Attempting to remove path: {}", this.getFullPath());
}
if (getParent() != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Child in Parent Entry with path: {} is removed", getParent().getFullPath());
}
getParent().removeChild(getPathElement());
getParent().deleteIfDangling();
parent = null;
} else {
LOG.warn("Parent for {} not found", this.toString());
}
}
/**
 * Removes the association between this entry and the given authzObj, pruning
 * the entry from the tree when it becomes useless: a leaf with no remaining
 * authz objects is deleted; an AUTHZ_OBJECT entry that still has children is
 * demoted to a DIR once its last authz object is gone. No-op when the authzObj
 * is not associated with this entry or when this is the root (no parent).
 */
public void deleteAuthzObject(String authzObj) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("[deleteAuthzObject] Removing authObj:{} from path {}", authzObj,
        this.toString());
  }
  if(!getAuthzObjs().contains(authzObj)) {
    return;
  }
  if (getParent() != null) {
    if (!hasChildren()) {
      // Remove the authzObj on the path entry. If the path
      // entry no longer maps to any authzObj, removes the
      // entry recursively.
      if (authzObjs != null) {
        removeAuthzObj(authzObj);
      }
      if (authzObjs == null) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Deleting path {}", this.toString());
        }
        deleteFromParent();
      }
    } else {
      // if the entry was for an authz object and has children, we
      // change it to be a dir entry. And remove the authzObj on
      // the path entry.
      if (getType() == EntryType.AUTHZ_OBJECT) {
        if (authzObjs != null) {
          removeAuthzObj(authzObj);
        }
        if(getAuthzObjsSize() == 0) {
          if (LOG.isDebugEnabled()) {
            LOG.debug("Entry with path: {} is changed to DIR", this.getFullPath());
          }
          setType(EntryType.DIR);
        }
      }
    }
  }
}
/**
 * Move this Entry under the new parent.
 *
 * Note: despite what an earlier version of this doc claimed, the method is
 * void — if an entry with the same name already exists under the new parent,
 * a warning is logged and the move is silently skipped.
 *
 * @param newParent the new parent; must not be null.
 * @param pathElem the path element on the new path; must not be empty.
 */
private void moveTo(Entry newParent, String pathElem) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Moving {} as a child to {}", this.toString(), newParent.toString());
  }
  Preconditions.checkNotNull(newParent);
  Preconditions.checkArgument(!pathElem.isEmpty());
  // Refuse to clobber an existing sibling with the same name.
  if (newParent.getChild(pathElem) != null) {
    LOG.warn(String.format(
        "Attempt to move %s to %s: entry with the same name %s already exists",
        this, newParent, pathElem));
    return;
  }
  // Detach from the old parent (pruning dangling ancestors), then re-attach.
  deleteFromParent();
  parent = newParent;
  parent.putChild(pathElem, this);
  pathElement = pathElem.intern();
}
public void delete() {
if (getParent() != null) {
if (!hasChildren()) {
deleteFromParent();
} else {
// if the entry was for an authz object and has children, we
// change it to be a dir entry.
if (getType() == EntryType.AUTHZ_OBJECT) {
setType(EntryType.DIR);
clearAuthzObjs();
if (LOG.isDebugEnabled()) {
LOG.debug("Entry with path: {} is changed to DIR", this.getFullPath());
}
}
}
}
}
private void deleteIfDangling() {
if (!hasChildren() && getType().isRemoveIfDangling()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Deleting {} as it is dangling", this.toString());
}
delete();
}
}
public Entry getParent() {
return parent;
}
public EntryType getType() {
return type;
}
public String getPathElement() {
return pathElement;
}
/**
 * @return the set of auth objects. The returned set should be used only
 * for querying, not for any modifications. If you just want to find out
 * the set size or whether it's empty, use the specialized getAuthzObjsSize()
 * and isAuthzObjsEmpty() methods that performs better.
 */
Set<String> getAuthzObjs() {
  if (authzObjs == null) {
    return Collections.<String>emptySet();
  }
  if (authzObjs instanceof Set) {
    return (Set<String>) authzObjs;
  }
  // Single-String case: wrap it in a fresh set for the caller.
  return newTreeSetWithElement((String) authzObjs);
}
/** Number of associated authz objects without materializing a set. */
int getAuthzObjsSize() {
  if (authzObjs == null) {
    return 0;
  }
  // The field is either a Set (size >= 2 normally) or a single String.
  return (authzObjs instanceof Set) ? ((Set<String>) authzObjs).size() : 1;
}
boolean isAuthzObjsEmpty() {
return authzObjs == null;
}
private Set<String> newTreeSetWithElement(String el) {
Set<String> result = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
result.add(el);
return result;
}
public Entry findPrefixEntry(List<String> pathElements) {
Preconditions.checkArgument(pathElements != null,
"pathElements cannot be NULL");
return (getType() == EntryType.PREFIX)
? this : findPrefixEntry(pathElements, 0);
}
private Entry findPrefixEntry(List<String> pathElements, int index) {
Entry prefixEntry = null;
if (index == pathElements.size()) {
prefixEntry = null;
} else {
Entry child = getChild(pathElements.get(index));
if (child != null) {
if (child.getType() == EntryType.PREFIX) {
prefixEntry = child;
} else {
prefixEntry = child.findPrefixEntry(pathElements, index + 1);
}
}
}
return prefixEntry;
}
public Entry find(String[] pathElements, boolean isPartialMatchOk) {
Preconditions.checkArgument(
pathElements != null && pathElements.length > 0,
"pathElements cannot be NULL or empty");
return find(pathElements, 0, isPartialMatchOk, null);
}
/**
 * Recursive worker for {@link #find(String[], boolean)}.
 *
 * Walks the tree one path element per call. {@code lastAuthObj} carries the
 * deepest ancestor seen so far that has authz objects, so that a partial
 * match can fall back to it when the exact path is absent or unannotated.
 *
 * @param pathElements the full path being looked up.
 * @param index position in pathElements this call examines.
 * @param isPartialMatchOk whether an ancestor with authz objects may be returned.
 * @param lastAuthObj deepest ancestor with authz objects on the walked path, or null.
 * @return the matching entry, a partial-match ancestor, or null.
 */
private Entry find(String[] pathElements, int index,
    boolean isPartialMatchOk, Entry lastAuthObj) {
  Entry found = null;
  if (index == pathElements.length) {
    // Consumed the whole path: this entry matches only if partial matches
    // are allowed and it carries authz objects.
    if (isPartialMatchOk && !isAuthzObjsEmpty()) {
      found = this;
    }
  } else {
    Entry child = getChild(pathElements[index]);
    if (child != null) {
      if (index == pathElements.length - 1) {
        // Last element: prefer the exact child if annotated, else fall back.
        found = (!child.isAuthzObjsEmpty()) ? child : lastAuthObj;
      } else {
        // Recurse, updating the fallback if the child carries authz objects.
        found = child.find(pathElements, index + 1, isPartialMatchOk,
            (!child.isAuthzObjsEmpty()) ? child : lastAuthObj);
      }
    } else {
      // Path diverges from the tree here; use the fallback if permitted.
      if (isPartialMatchOk) {
        found = lastAuthObj;
      }
    }
  }
  return found;
}
public String getFullPath() {
String path = getFullPath(this, new StringBuilder()).toString();
if (path.isEmpty()) {
path = Path.SEPARATOR;
}
return path;
}
private StringBuilder getFullPath(Entry entry, StringBuilder sb) {
if (entry.getParent() != null) {
getFullPath(entry.getParent(), sb).append(Path.SEPARATOR).append(
entry.getPathElement());
}
return sb;
}
}
private volatile Entry root;
private String[] prefixes;
// The hive authorized objects to path entries mapping.
// One authorized object can map to a set of path entries.
private Map<String, Set<Entry>> authzObjToEntries;
public HMSPaths() {
LOG.info(toString() + " (default) Initialized");
}
/**
 * Creates an HMSPaths managing the given absolute path prefixes; only paths
 * under a prefix are tracked. If "/" is a prefix it must be the only one.
 *
 * @param pathPrefixes absolute path prefixes managed by Sentry
 * @throws IllegalArgumentException if "/" is combined with other prefixes
 */
public HMSPaths(String[] pathPrefixes) {
  boolean rootPrefix = false;
  // Copy the array to avoid external modification
  this.prefixes = Arrays.copyOf(pathPrefixes, pathPrefixes.length);
  for (String pathPrefix : pathPrefixes) {
    rootPrefix = rootPrefix || pathPrefix.equals(Path.SEPARATOR);
  }
  if (rootPrefix && pathPrefixes.length > 1) {
    throw new IllegalArgumentException(
        "Root is a path prefix, there cannot be other path prefixes");
  }
  root = Entry.createRoot(rootPrefix);
  if (!rootPrefix) {
    for (String pathPrefix : pathPrefixes) {
      root.createPrefix(getPathElements(pathPrefix));
    }
  }
  // Case-insensitive because authz object names (db/table) are case-insensitive.
  authzObjToEntries = new TreeMap<String, Set<Entry>>(String.CASE_INSENSITIVE_ORDER);
  // Fixed: prefixes.toString() on an array logs only the identity hash code;
  // Arrays.toString logs the actual prefix values.
  LOG.info("Sentry managed prefixes: " + Arrays.toString(prefixes));
}
void _addAuthzObject(String authzObj, List<String> authzObjPaths) {
addAuthzObject(authzObj, getPathsElements(authzObjPaths));
}
/**
 * Registers {@code authzObj} against the given per-path element lists,
 * replacing any previously registered paths for that object. Paths outside
 * every managed prefix are ignored with a warning. Entries that belonged to
 * the object before but are absent from the new set get the object removed
 * from them (which may prune them from the tree).
 */
void addAuthzObject(String authzObj, List<List<String>> authzObjPathElements) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Number of Objects: {}", authzObjToEntries.size());
        LOG.debug(String.format("%s addAuthzObject(%s, %s)",
            this, authzObj, assemblePaths(authzObjPathElements)));
    }
    Set<Entry> staleCandidates = authzObjToEntries.get(authzObj);
    Set<Entry> registered = new HashSet<Entry>(authzObjPathElements.size());
    for (List<String> pathElements : authzObjPathElements) {
        Entry created = root.createAuthzObjPath(pathElements, authzObj);
        if (created == null) {
            // Path lies outside every managed prefix; nothing to register.
            LOG.warn(String.format("%s addAuthzObject(%s, %s):" +
                " Ignoring path %s, no prefix",
                this, authzObj, assemblePaths(authzObjPathElements), pathElements));
        } else {
            registered.add(created);
        }
    }
    authzObjToEntries.put(authzObj, registered);
    if (staleCandidates == null) {
        return;
    }
    // Anything previously mapped but not re-registered is stale: detach the
    // object from those entries (iterating an empty set is a no-op).
    staleCandidates.removeAll(registered);
    for (Entry stale : staleCandidates) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Removing stale path {}", stale.toString());
        }
        stale.deleteAuthzObject(authzObj);
    }
}
/**
 * Adds the given paths to an already-registered {@code authzObj}. When the
 * object is unknown: registers it from scratch if {@code createNew} is true,
 * otherwise logs a warning and does nothing. Paths outside every managed
 * prefix are skipped.
 */
void addPathsToAuthzObject(String authzObj,
    List<List<String>> authzObjPathElements, boolean createNew) {
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("%s addPathsToAuthzObject(%s, %s, %b)",
            this, authzObj, assemblePaths(authzObjPathElements), createNew));
    }
    Set<Entry> existing = authzObjToEntries.get(authzObj);
    if (existing == null) {
        // Unknown object: either register it from scratch or just complain.
        if (createNew) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("No paths found for Object:{}, Adding new", authzObj);
            }
            addAuthzObject(authzObj, authzObjPathElements);
        } else {
            LOG.warn(String.format("%s addPathsToAuthzObject(%s, %s, %b):" +
                " Path was not added to AuthzObject, could not find key in authzObjToPath",
                this, authzObj, assemblePaths(authzObjPathElements), createNew));
        }
        return;
    }
    Set<Entry> added = new HashSet<Entry>(authzObjPathElements.size());
    for (List<String> pathElements : authzObjPathElements) {
        Entry created = root.createAuthzObjPath(pathElements, authzObj);
        if (created != null) {
            added.add(created);
        } else if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("%s addPathsToAuthzObject(%s, %s, %b):" +
                " Cannot create authz obj for path %s because it is outside of prefix",
                this, authzObj, assemblePaths(authzObjPathElements), createNew, pathElements));
        }
    }
    existing.addAll(added);
    if (LOG.isDebugEnabled()) {
        LOG.debug("[addPathsToAuthzObject]Updated path entries for {}", authzObj);
    }
}
/** Convenience overload: splits raw path strings, never creates new mappings. */
void _addPathsToAuthzObject(String authzObj, List<String> authzObjPaths) {
    List<List<String>> pathElements = getPathsElements(authzObjPaths);
    addPathsToAuthzObject(authzObj, pathElements, false);
}
/** Overload that never creates a brand-new mapping for an unknown object. */
void addPathsToAuthzObject(String authzObj, List<List<String>> authzObjPaths) {
    boolean createNew = false;
    addPathsToAuthzObject(authzObj, authzObjPaths, createNew);
}
/*
 1. Removes authzObj from all entries corresponding to the authzObjPathElements
    (this also deletes an entry once it has no more authzObjs on that path, and does so recursively upwards).
 2. Removes the paths from the value of the authzObjToPath map for this authzObj key; it does not reset the entry set to null even if the set becomes empty.
*/
/**
 * Detaches {@code authzObj} from each of the given registered paths. Paths
 * that are not registered are warned about (this can legitimately happen for
 * implicit partition locations). When the object has no paths left, its key
 * is removed from the mapping entirely.
 *
 * @param authzObj the authorizable object to detach paths from.
 * @param authzObjPathElements the paths (as element lists) to remove.
 */
void deletePathsFromAuthzObject(String authzObj,
    List<List<String>> authzObjPathElements) {
    Set<Entry> entries = authzObjToEntries.get(authzObj);
    if (entries == null) {
        LOG.warn(String.format("%s deletePathsFromAuthzObject(%s, %s):" +
            " Path was not deleted from AuthzObject, could not find key in authzObjToPath",
            this, authzObj, assemblePaths(authzObjPathElements)));
        return;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("[deletePathsFromAuthzObject] For {}", authzObj);
    }
    for (List<String> pathElements : authzObjPathElements) {
        // Exact match only: a partial match must not delete unrelated entries.
        // Consistency: use toArray(new String[0]) like the other call sites.
        Entry entry = root.find(pathElements.toArray(new String[0]), false);
        if (entry != null) {
            entries.remove(entry);
            entry.deleteAuthzObject(authzObj);
        } else {
            LOG.warn(String.format("%s deletePathsFromAuthzObject(%s, %s):" +
                " Path %s was not deleted from AuthzObject, path not registered." +
                " This is possible for implicit partition locations",
                this, authzObj, assemblePaths(authzObjPathElements), pathElements));
        }
    }
    // Idiom: isEmpty() instead of size() == 0.
    if (entries.isEmpty()) {
        authzObjToEntries.remove(authzObj);
        if (LOG.isDebugEnabled()) {
            LOG.debug(
                "[deletePathsFromAuthzObject] Removing the mapping for {} as the entries are stale",
                authzObj);
        }
    }
}
/**
 * Unregisters {@code authzObj} completely: removes its key from the mapping
 * and detaches it from every entry it was associated with.
 */
void deleteAuthzObject(String authzObj) {
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("%s deleteAuthzObject(%s)", this, authzObj));
        LOG.debug("Number of Objects: {}", authzObjToEntries.size());
    }
    Set<Entry> removed = authzObjToEntries.remove(authzObj);
    if (removed == null) {
        return;
    }
    for (Entry entry : removed) {
        entry.deleteAuthzObject(authzObj);
    }
}
/** List-based convenience overload of {@link #findAuthzObject(String[])}. */
Set<String> findAuthzObject(List<String> pathElements) {
    String[] elements = pathElements.toArray(new String[0]);
    return findAuthzObject(elements);
}
/** Looks up authz objects for a path; ancestor (partial) matches are allowed. */
@Override
public Set<String> findAuthzObject(String[] pathElements) {
    boolean partialMatchOk = true;
    return findAuthzObject(pathElements, partialMatchOk);
}
/** Looks up authz objects for a path; only an exact path match counts. */
@Override
public Set<String> findAuthzObjectExactMatches(String[] pathElements) {
    boolean partialMatchOk = false;
    return findAuthzObject(pathElements, partialMatchOk);
}
/**
 * Based on the isPartialOk flag, returns all authorizable objects
 * (database/table/partition) associated with the path; if no exact match is
 * found and partial matching is allowed, returns the objects of the first
 * ancestor that has associated authorizable objects.
 *
 * @param pathElements A path split into segments.
 * @param isPartialOk Flag that indicates whether a partial path match is OK.
 * @return A set of authzObjects associated with this path, or {@code null}
 *         when the path is empty or nothing matches.
 */
public Set<String> findAuthzObject(String[] pathElements, boolean isPartialOk) {
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("%s findAuthzObject(%s, %b)",
            this, Arrays.toString(pathElements), isPartialOk));
    }
    // Handle '/'
    if (pathElements == null || pathElements.length == 0) {
        return null;
    }
    Entry match = root.find(pathElements, isPartialOk);
    Set<String> result = (match == null) ? null : match.getAuthzObjs();
    if ((result == null || result.isEmpty()) && LOG.isDebugEnabled()) {
        LOG.debug(String.format("%s findAuthzObject(%s, %b) - no authzObject found",
            this, Arrays.toString(pathElements), isPartialOk));
    }
    return result;
}
/*
 The following condition must hold: oldName != newName.
 If oldPath == newPath (example: renaming an external table, where only the HMS metadata is updated):
 => new_table.add(new_path), new_table.add(old_table_partition_paths), old_table.dropAllPaths.
 If oldPath != newPath (example: renaming a managed table, where the HMS metadata is updated and the physical files are moved to the new location):
 => new_table.add(new_path), old_table.dropAllPaths.
*/
/**
 * Renames an authz object from {@code oldName} to {@code newName}: moves the
 * underlying tree entry when the first old/new paths differ, transfers the
 * entry set under the new name, retags each entry, and finally drops the old
 * name entirely.
 *
 * NOTE(review): only the FIRST path of each list is consulted for the move —
 * presumably remaining (partition) paths travel with the moved subtree;
 * confirm against callers.
 */
void renameAuthzObject(String oldName, List<List<String>> oldPathElems,
    String newName, List<List<String>> newPathElems) {
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("%s renameAuthzObject({%s, %s} -> {%s, %s})",
            this, oldName, assemblePaths(oldPathElems), newName, assemblePaths(newPathElems)));
    }
    // Guard: both path lists must be non-empty and the name must actually change.
    if (oldPathElems == null || oldPathElems.isEmpty() ||
        newPathElems == null || newPathElems.isEmpty() ||
        newName == null || newName.equals(oldName)) {
        LOG.warn(String.format("%s renameAuthzObject({%s, %s} -> {%s, %s})" +
            ": invalid inputs, skipping",
            this, oldName, assemblePaths(oldPathElems), newName, assemblePaths(newPathElems)));
        return;
    }
    // if oldPath == newPath, that is path has not changed as part of rename and hence new table
    // needs to have old paths => new_table.add(old_table_partition_paths)
    List<String> oldPathElements = oldPathElems.get(0);
    List<String> newPathElements = newPathElems.get(0);
    if (!oldPathElements.equals(newPathElements)) {
        // Physical location changed: graft the old entry (and its subtree)
        // onto the parent of the new location.
        Entry oldEntry = root.find(oldPathElements.toArray(new String[0]), false);
        Entry newParent = root.createParent(newPathElements);
        if (oldEntry == null) {
            LOG.warn(String.format("%s Moving old paths for renameAuthzObject({%s, %s} -> {%s, %s}) is skipped. Cannot find entry for old name",
                this, oldName, assemblePaths(oldPathElems), newName, assemblePaths(newPathElems)));
        } else {
            oldEntry.moveTo(newParent, newPathElements.get(newPathElements.size() - 1));
        }
    }
    // Re-write authObj from oldName to newName.
    Set<Entry> entries = authzObjToEntries.get(oldName);
    if (entries == null) {
        LOG.warn(String.format("%s renameAuthzObject({%s, %s} -> {%s, %s}):" +
            " cannot find oldName %s in authzObjToPath",
            this, oldName, assemblePaths(oldPathElems), newName, assemblePaths(newPathElems), oldName));
    } else {
        // The same entry set becomes reachable under the new name; each entry
        // is retagged from oldName to newName.
        authzObjToEntries.put(newName, entries);
        for (Entry e : entries) {
            e.addAuthzObj(newName);
            if (e.getAuthzObjs().contains(oldName)) {
                e.removeAuthzObj(oldName);
            } else {
                LOG.warn(String.format("%s renameAuthzObject({%s, %s} -> {%s, %s}):" +
                    " Unexpected state: authzObjToPath has an " +
                    "entry %s where one of the authz objects does not have oldName",
                    this, oldName, assemblePaths(oldPathElems), newName, assemblePaths(newPathElems), e));
            }
        }
    }
    // old_table.dropAllPaths
    // Drops the oldName key and detaches oldName from any remaining entries.
    deleteAuthzObject(oldName);
}
/** Returns true when the given path falls under one of the managed prefixes. */
@Override
public boolean isUnderPrefix(String[] pathElements) {
    Entry prefixEntry = root.findPrefixEntry(Lists.newArrayList(pathElements));
    return prefixEntry != null;
}
/**
 * Returns the configured path prefixes. Used by the serializer.
 * Returns a defensive copy so callers cannot mutate internal state —
 * consistent with the constructor, which copies the incoming array.
 * May return {@code null} when built via the default constructor.
 */
String[] getPrefixes() {
    return prefixes == null ? null : Arrays.copyOf(prefixes, prefixes.length);
}
// Presumably used by the (de)serializer (see getPathsDump) to snapshot the
// prefix tree — confirm against HMSPathsDumper.
Entry getRootEntry() {
    return root;
}
// Publishes a replacement tree; root is volatile, so readers observe the
// fully-built tree once this assignment completes.
void setRootEntry(Entry root) {
    this.root = root;
}
// Restores the authzObj -> path-entries mapping (deserialization path).
void setAuthzObjToEntryMapping(Map<String, Set<Entry>> mapping) {
    authzObjToEntries = mapping;
}
/**
 * For logging: collect all path entries into a list.
 *
 * Each Entry has an informative toString() implementation,
 * so the returned collection can be printed directly.
 *
 * Non-recursive (explicit-stack) traversal of the whole tree.
 */
public Collection<Entry> getAllEntries() {
    List<Entry> collected = new ArrayList<>();
    Stack<Entry> pending = new Stack<>();
    pending.push(root);
    while (!pending.isEmpty()) {
        Entry current = pending.pop();
        collected.add(current);
        // childrenValues() copes with entry.children == null.
        current.childrenValues().forEach(pending::push);
    }
    return collected;
}
// Creates a dumper that can snapshot this instance for (de)serialization.
@Override
public HMSPathsDumper getPathsDump() {
    return new HMSPathsDumper(this);
}
/** Short identifier, e.g. {@code HMSPaths:[/user/hive/warehouse]}. */
@Override
public String toString() {
    return getClass().getSimpleName() + ":" + Arrays.toString(prefixes);
}
/** Debug dump: identifier followed by every entry in the tree. */
public String dumpContent() {
    StringBuilder sb = new StringBuilder(toString());
    sb.append(": ").append(getAllEntries());
    return sb.toString();
}
}
|
googleapis/google-cloud-java | 36,145 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateExtensionRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/extension_registry_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Request message for
* [ExtensionRegistryService.UpdateExtension][google.cloud.aiplatform.v1beta1.ExtensionRegistryService.UpdateExtension].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.UpdateExtensionRequest}
*/
public final class UpdateExtensionRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.UpdateExtensionRequest)
UpdateExtensionRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateExtensionRequest.newBuilder() to construct.
private UpdateExtensionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateExtensionRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateExtensionRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.ExtensionRegistryServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_UpdateExtensionRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.ExtensionRegistryServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_UpdateExtensionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest.class,
com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest.Builder.class);
}
private int bitField0_;
public static final int EXTENSION_FIELD_NUMBER = 1;
private com.google.cloud.aiplatform.v1beta1.Extension extension_;
/**
*
*
* <pre>
* Required. The Extension which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Extension extension = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the extension field is set.
*/
@java.lang.Override
public boolean hasExtension() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The Extension which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Extension extension = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The extension.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.Extension getExtension() {
return extension_ == null
? com.google.cloud.aiplatform.v1beta1.Extension.getDefaultInstance()
: extension_;
}
/**
*
*
* <pre>
* Required. The Extension which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Extension extension = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ExtensionOrBuilder getExtensionOrBuilder() {
return extension_ == null
? com.google.cloud.aiplatform.v1beta1.Extension.getDefaultInstance()
: extension_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* Supported fields:
*
* * `display_name`
* * `description`
* * `runtime_config`
* * `tool_use_examples`
* * `manifest.description`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* Supported fields:
*
* * `display_name`
* * `description`
* * `runtime_config`
* * `tool_use_examples`
* * `manifest.description`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* Supported fields:
*
* * `display_name`
* * `description`
* * `runtime_config`
* * `tool_use_examples`
* * `manifest.description`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
// Generated code (protoc): memoized initialization check. This message has no
// required proto fields, so it is always initialized; the -1/0/1 sentinel in
// memoizedIsInitialized caches the answer.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Generated code (protoc): serializes the set fields in field-number order.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Bit 0 of bitField0_ tracks presence of `extension` (field 1).
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getExtension());
  }
  // Bit 1 tracks presence of `update_mask` (field 2).
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(2, getUpdateMask());
  }
  getUnknownFields().writeTo(output);
}
// Generated code (protoc): computes and memoizes the wire size of this message.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size; // -1 means "not computed yet"
  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getExtension());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Generated code (protoc): structural equality over presence bits, both
// sub-message fields, and the unknown-field set.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest other =
      (com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest) obj;
  // Fields must agree on presence first, then on value.
  if (hasExtension() != other.hasExtension()) return false;
  if (hasExtension()) {
    if (!getExtension().equals(other.getExtension())) return false;
  }
  if (hasUpdateMask() != other.hasUpdateMask()) return false;
  if (hasUpdateMask()) {
    if (!getUpdateMask().equals(other.getUpdateMask())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Generated code (protoc): memoized hash over descriptor, set fields and
// unknown fields (0 is the "not yet computed" sentinel). Consistent with
// equals(): only fields that are present contribute.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasExtension()) {
    hash = (37 * hash) + EXTENSION_FIELD_NUMBER;
    hash = (53 * hash) + getExtension().hashCode();
  }
  if (hasUpdateMask()) {
    hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
    hash = (53 * hash) + getUpdateMask().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [ExtensionRegistryService.UpdateExtension][google.cloud.aiplatform.v1beta1.ExtensionRegistryService.UpdateExtension].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.UpdateExtensionRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.UpdateExtensionRequest)
com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.ExtensionRegistryServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_UpdateExtensionRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.ExtensionRegistryServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_UpdateExtensionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest.class,
com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getExtensionFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
extension_ = null;
if (extensionBuilder_ != null) {
extensionBuilder_.dispose();
extensionBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.ExtensionRegistryServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_UpdateExtensionRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest build() {
com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest buildPartial() {
com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest result =
new com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Generated code (protoc): copies the builder's set fields into `result`,
// translating builder presence bits into message presence bits. When a nested
// builder exists it is built; otherwise the raw field reference is used.
private void buildPartial0(com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest result) {
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.extension_ = extensionBuilder_ == null ? extension_ : extensionBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
    to_bitField0_ |= 0x00000002;
  }
  result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest) {
return mergeFrom((com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest other) {
if (other == com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest.getDefaultInstance())
return this;
if (other.hasExtension()) {
mergeExtension(other.getExtension());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getExtensionFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.aiplatform.v1beta1.Extension extension_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.Extension,
com.google.cloud.aiplatform.v1beta1.Extension.Builder,
com.google.cloud.aiplatform.v1beta1.ExtensionOrBuilder>
extensionBuilder_;
/**
*
*
* <pre>
* Required. The Extension which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Extension extension = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the extension field is set.
*/
public boolean hasExtension() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The Extension which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Extension extension = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The extension.
*/
public com.google.cloud.aiplatform.v1beta1.Extension getExtension() {
if (extensionBuilder_ == null) {
return extension_ == null
? com.google.cloud.aiplatform.v1beta1.Extension.getDefaultInstance()
: extension_;
} else {
return extensionBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The Extension which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Extension extension = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setExtension(com.google.cloud.aiplatform.v1beta1.Extension value) {
if (extensionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
extension_ = value;
} else {
extensionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Extension which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Extension extension = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setExtension(
com.google.cloud.aiplatform.v1beta1.Extension.Builder builderForValue) {
if (extensionBuilder_ == null) {
extension_ = builderForValue.build();
} else {
extensionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Extension which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Extension extension = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Generated code (protoc): merges `value` into the current `extension` field.
// With an active nested builder it delegates to it; otherwise it merges in
// place unless the field is unset or still the default instance, in which
// case the value is adopted directly.
public Builder mergeExtension(com.google.cloud.aiplatform.v1beta1.Extension value) {
  if (extensionBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)
        && extension_ != null
        && extension_ != com.google.cloud.aiplatform.v1beta1.Extension.getDefaultInstance()) {
      getExtensionBuilder().mergeFrom(value);
    } else {
      extension_ = value;
    }
  } else {
    extensionBuilder_.mergeFrom(value);
  }
  // Only mark the field present (and notify listeners) when something is set.
  if (extension_ != null) {
    bitField0_ |= 0x00000001;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Required. The Extension which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Extension extension = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearExtension() {
bitField0_ = (bitField0_ & ~0x00000001);
extension_ = null;
if (extensionBuilder_ != null) {
extensionBuilder_.dispose();
extensionBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Extension which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Extension extension = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.Extension.Builder getExtensionBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getExtensionFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The Extension which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Extension extension = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.ExtensionOrBuilder getExtensionOrBuilder() {
if (extensionBuilder_ != null) {
return extensionBuilder_.getMessageOrBuilder();
} else {
return extension_ == null
? com.google.cloud.aiplatform.v1beta1.Extension.getDefaultInstance()
: extension_;
}
}
/**
*
*
* <pre>
* Required. The Extension which replaces the resource on the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.Extension extension = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Lazily creates the SingleFieldBuilderV3 for the extension field. Once created,
// the builder owns the field's value, so the plain message reference is nulled
// out — other accessors treat "extensionBuilder_ != null" as builder mode.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.aiplatform.v1beta1.Extension,
        com.google.cloud.aiplatform.v1beta1.Extension.Builder,
        com.google.cloud.aiplatform.v1beta1.ExtensionOrBuilder>
    getExtensionFieldBuilder() {
  if (extensionBuilder_ == null) {
    extensionBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.Extension,
            com.google.cloud.aiplatform.v1beta1.Extension.Builder,
            com.google.cloud.aiplatform.v1beta1.ExtensionOrBuilder>(
            getExtension(), getParentForChildren(), isClean());
    extension_ = null;
  }
  return extensionBuilder_;
}
// Backing storage for the update_mask field (field number 2); presence is
// tracked by bit 0x00000002 of bitField0_.
private com.google.protobuf.FieldMask updateMask_;
// Lazily-created nested builder; non-null once builder mode is entered, at which
// point it (rather than updateMask_) holds the field's current value.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    updateMaskBuilder_;
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* Supported fields:
*
* * `display_name`
* * `description`
* * `runtime_config`
* * `tool_use_examples`
* * `manifest.description`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
// True when update_mask has been explicitly set (presence bit 0x00000002).
public boolean hasUpdateMask() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* Supported fields:
*
* * `display_name`
* * `description`
* * `runtime_config`
* * `tool_use_examples`
* * `manifest.description`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
// Returns the current update_mask, preferring the nested builder's view when one
// exists; never returns null (default instance when unset).
public com.google.protobuf.FieldMask getUpdateMask() {
  if (updateMaskBuilder_ == null) {
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  } else {
    return updateMaskBuilder_.getMessage();
  }
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* Supported fields:
*
* * `display_name`
* * `description`
* * `runtime_config`
* * `tool_use_examples`
* * `manifest.description`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Replaces update_mask with the given message. Null is rejected; use
// clearUpdateMask() to unset the field.
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    updateMask_ = value;
  } else {
    updateMaskBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* Supported fields:
*
* * `display_name`
* * `description`
* * `runtime_config`
* * `tool_use_examples`
* * `manifest.description`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Replaces update_mask from a builder; the builder is built immediately, so later
// mutations of builderForValue do not affect this message.
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
  if (updateMaskBuilder_ == null) {
    updateMask_ = builderForValue.build();
  } else {
    updateMaskBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* Supported fields:
*
* * `display_name`
* * `description`
* * `runtime_config`
* * `tool_use_examples`
* * `manifest.description`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Merges {@code value} into update_mask using proto3 message-merge semantics:
// field-wise merge when a non-default mask is already present, otherwise replace.
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    // Message mode: the field is held directly in updateMask_.
    if (((bitField0_ & 0x00000002) != 0)
        && updateMask_ != null
        && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
      // getUpdateMaskBuilder() also sets the presence bit and fires onChanged().
      getUpdateMaskBuilder().mergeFrom(value);
    } else {
      updateMask_ = value;
    }
  } else {
    // Builder mode: delegate; the field builder handles change notification.
    updateMaskBuilder_.mergeFrom(value);
  }
  if (updateMask_ != null) {
    // Only the plain-message path leaves updateMask_ non-null (builder mode nulls
    // it — see getUpdateMaskFieldBuilder()), so presence/notification happen here.
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* Supported fields:
*
* * `display_name`
* * `description`
* * `runtime_config`
* * `tool_use_examples`
* * `manifest.description`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Clears update_mask: resets the presence bit, drops the cached message, and
// disposes any nested builder, then notifies the parent of the change.
public Builder clearUpdateMask() {
  bitField0_ = (bitField0_ & ~0x00000002);
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* Supported fields:
*
* * `display_name`
* * `description`
* * `runtime_config`
* * `tool_use_examples`
* * `manifest.description`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Returns a mutable builder for update_mask. Marks the field as set and switches
// it into builder mode (see getUpdateMaskFieldBuilder()).
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
  bitField0_ |= 0x00000002;
  onChanged();
  return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* Supported fields:
*
* * `display_name`
* * `description`
* * `runtime_config`
* * `tool_use_examples`
* * `manifest.description`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Read-only view of update_mask; never returns null (falls back to the default
// instance when the field is unset and no builder exists).
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessageOrBuilder();
  } else {
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  }
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* Supported fields:
*
* * `display_name`
* * `description`
* * `runtime_config`
* * `tool_use_examples`
* * `manifest.description`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Lazily creates the SingleFieldBuilderV3 for update_mask. Once created, the
// builder owns the field's value, so the plain message reference is nulled out.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    getUpdateMaskFieldBuilder() {
  if (updateMaskBuilder_ == null) {
    updateMaskBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>(
            getUpdateMask(), getParentForChildren(), isClean());
    updateMask_ = null;
  }
  return updateMaskBuilder_;
}
// Standard generated pass-throughs: unknown-field handling is delegated entirely
// to the GeneratedMessageV3.Builder superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.UpdateExtensionRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.UpdateExtensionRequest)
// Shared immutable default instance, returned by getDefaultInstance() and
// getDefaultInstanceForType().
private static final com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest();
}

public static com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser that deserializes UpdateExtensionRequest from a CodedInputStream. Every
// failure mode is normalized to InvalidProtocolBufferException carrying the
// partially-parsed message so callers can inspect what was read before the error.
private static final com.google.protobuf.Parser<UpdateExtensionRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateExtensionRequest>() {
      @java.lang.Override
      public UpdateExtensionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Re-throw with whatever was parsed before the failure attached.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<UpdateExtensionRequest> parser() {
  return PARSER;
}
// Per-instance accessors required by the Message contract; both delegate to the
// class-level singletons above.
@java.lang.Override
public com.google.protobuf.Parser<UpdateExtensionRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.UpdateExtensionRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/nifi | 36,568 | nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/StandardFlowSnippet.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.controller;
import org.apache.nifi.bundle.BundleCoordinate;
import org.apache.nifi.connectable.Connectable;
import org.apache.nifi.connectable.ConnectableType;
import org.apache.nifi.connectable.Connection;
import org.apache.nifi.connectable.Funnel;
import org.apache.nifi.connectable.Port;
import org.apache.nifi.connectable.Position;
import org.apache.nifi.connectable.Size;
import org.apache.nifi.controller.exception.ProcessorInstantiationException;
import org.apache.nifi.controller.flow.FlowManager;
import org.apache.nifi.controller.label.Label;
import org.apache.nifi.controller.queue.FlowFileQueue;
import org.apache.nifi.controller.queue.LoadBalanceStrategy;
import org.apache.nifi.controller.service.ControllerServiceNode;
import org.apache.nifi.flow.ExecutionEngine;
import org.apache.nifi.flowfile.FlowFilePrioritizer;
import org.apache.nifi.groups.FlowFileConcurrency;
import org.apache.nifi.groups.FlowFileOutboundPolicy;
import org.apache.nifi.groups.ProcessGroup;
import org.apache.nifi.groups.RemoteProcessGroup;
import org.apache.nifi.groups.RemoteProcessGroupPortDescriptor;
import org.apache.nifi.logging.LogLevel;
import org.apache.nifi.nar.ExtensionDefinition;
import org.apache.nifi.nar.ExtensionManager;
import org.apache.nifi.parameter.ParameterContext;
import org.apache.nifi.processor.Processor;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.StandardProcessContext;
import org.apache.nifi.registry.flow.StandardVersionControlInformation;
import org.apache.nifi.registry.flow.VersionControlInformation;
import org.apache.nifi.remote.StandardRemoteProcessGroupPortDescriptor;
import org.apache.nifi.remote.protocol.SiteToSiteTransportProtocol;
import org.apache.nifi.scheduling.ExecutionNode;
import org.apache.nifi.scheduling.SchedulingStrategy;
import org.apache.nifi.util.BundleUtils;
import org.apache.nifi.util.SnippetUtils;
import org.apache.nifi.web.api.dto.BatchSettingsDTO;
import org.apache.nifi.web.api.dto.BundleDTO;
import org.apache.nifi.web.api.dto.ConnectableDTO;
import org.apache.nifi.web.api.dto.ConnectionDTO;
import org.apache.nifi.web.api.dto.ControllerServiceDTO;
import org.apache.nifi.web.api.dto.FlowSnippetDTO;
import org.apache.nifi.web.api.dto.FunnelDTO;
import org.apache.nifi.web.api.dto.LabelDTO;
import org.apache.nifi.web.api.dto.PortDTO;
import org.apache.nifi.web.api.dto.PositionDTO;
import org.apache.nifi.web.api.dto.ProcessGroupDTO;
import org.apache.nifi.web.api.dto.ProcessorConfigDTO;
import org.apache.nifi.web.api.dto.ProcessorDTO;
import org.apache.nifi.web.api.dto.RelationshipDTO;
import org.apache.nifi.web.api.dto.RemoteProcessGroupContentsDTO;
import org.apache.nifi.web.api.dto.RemoteProcessGroupDTO;
import org.apache.nifi.web.api.dto.RemoteProcessGroupPortDTO;
import org.apache.nifi.web.api.entity.ParameterContextReferenceEntity;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
public class StandardFlowSnippet implements FlowSnippet {
private final FlowSnippetDTO dto;
private final ExtensionManager extensionManager;
/**
 * Creates a flow snippet backed by the given DTO.
 *
 * @param dto the snippet contents to validate and instantiate
 * @param extensionManager used to resolve component types and their bundles
 */
public StandardFlowSnippet(final FlowSnippetDTO dto, final ExtensionManager extensionManager) {
    this.dto = dto;
    this.extensionManager = extensionManager;
}
/**
 * Ensures the snippet can be legally added to the given group: no Input/Output Port name
 * may collide with an existing port, all component types must be installed, and the snippet
 * must not conflict with the group's version control state.
 */
@Override
public void validate(final ProcessGroup group) {
    // A snippet may not introduce a Port whose name is already taken in the target group.
    rejectTakenPortNames(dto.getInputPorts(), group::getInputPortByName);
    rejectTakenPortNames(dto.getOutputPorts(), group::getOutputPortByName);

    verifyComponentTypesInSnippet();
    SnippetUtils.verifyNoVersionControlConflicts(dto, group);
}

/** Throws if any proposed port's name resolves to an existing port via the given lookup. */
private void rejectTakenPortNames(final Set<PortDTO> proposedPorts,
        final java.util.function.Function<String, Port> portLookup) {
    for (final PortDTO proposed : proposedPorts) {
        if (portLookup.apply(proposed.getName()) != null) {
            throw new IllegalStateException("One or more of the proposed Port names is not available in the process group");
        }
    }
}
/**
 * Verifies that every Processor, Controller Service, and FlowFile Prioritizer referenced by
 * the snippet (recursively, including child groups) is a type known to the ExtensionManager,
 * and that the referenced bundles are actually installed.
 */
@Override
public void verifyComponentTypesInSnippet() {
    verifyProcessorsInSnippet(dto, findSupportedBundles(Processor.class));
    verifyControllerServicesInSnippet(dto, findSupportedBundles(ControllerService.class));

    // Fully-qualified class names of every installed prioritizer.
    final Set<String> prioritizerClasses = new HashSet<>();
    for (final ExtensionDefinition extensionDefinition : extensionManager.getExtensions(FlowFilePrioritizer.class)) {
        prioritizerClasses.add(extensionDefinition.getImplementationClassName());
    }

    // Gather every connection in the snippet, including those nested in child groups.
    final Set<ConnectionDTO> allConns = new HashSet<>(dto.getConnections());
    for (final ProcessGroupDTO childGroup : dto.getProcessGroups()) {
        allConns.addAll(findAllConnections(childGroup));
    }

    // Each prioritizer referenced by a connection must be a known type.
    for (final ConnectionDTO conn : allConns) {
        final List<String> prioritizers = conn.getPrioritizers();
        if (prioritizers == null) {
            continue;
        }
        for (final String prioritizer : prioritizers) {
            if (!prioritizerClasses.contains(prioritizer)) {
                throw new IllegalStateException("Invalid FlowFile Prioritizer Type: " + prioritizer);
            }
        }
    }
}

/**
 * Maps each installed implementation class name of the given extension type to the set of
 * bundle coordinates that provide it.
 */
private Map<String, Set<BundleCoordinate>> findSupportedBundles(final Class<?> extensionType) {
    final Map<String, Set<BundleCoordinate>> supportedBundles = new HashMap<>();
    for (final ExtensionDefinition extensionDefinition : extensionManager.getExtensions(extensionType)) {
        final String name = extensionDefinition.getImplementationClassName();
        supportedBundles.put(name, extensionManager.getBundles(name).stream()
                .map(bundle -> bundle.getBundleDetails().getCoordinate())
                .collect(Collectors.toSet()));
    }
    return supportedBundles;
}
/**
 * Instantiates the snippet into the given group as a top-level addition, i.e. without
 * assigning versioned component IDs (delegates with topLevel = true).
 */
@Override
public void instantiate(final FlowManager flowManager, final FlowController flowController, final ProcessGroup group) throws ProcessorInstantiationException {
    instantiate(flowManager, flowController, group, true);
}
/**
 * Recursively collects every ConnectionDTO contained in the given group and all of its
 * descendant groups.
 *
 * @param group the group to search
 * @return all connections found, in no particular order
 */
private Set<ConnectionDTO> findAllConnections(final ProcessGroupDTO group) {
    final Set<ConnectionDTO> connections = new HashSet<>(group.getContents().getConnections());
    group.getContents().getProcessGroups()
            .forEach(child -> connections.addAll(findAllConnections(child)));
    return connections;
}
/**
 * Recursively verifies that every Controller Service in the snippet has a known type and an
 * installed bundle.
 *
 * NOTE(review): unlike the processor variant, an unknown type is reported before a missing
 * bundle here (the original checked the type first); preserved as-is for behavior parity.
 */
private void verifyControllerServicesInSnippet(final FlowSnippetDTO templateContents, final Map<String, Set<BundleCoordinate>> supportedTypes) {
    if (templateContents.getControllerServices() != null) {
        for (final ControllerServiceDTO controllerService : templateContents.getControllerServices()) {
            if (!supportedTypes.containsKey(controllerService.getType())) {
                throw new IllegalStateException("Invalid Controller Service Type: " + controllerService.getType());
            }
            if (controllerService.getBundle() == null) {
                throw new IllegalArgumentException("Controller Service bundle must be specified.");
            }
            verifyBundleInSnippet(controllerService.getBundle(), supportedTypes.get(controllerService.getType()));
        }
    }

    if (templateContents.getProcessGroups() != null) {
        for (final ProcessGroupDTO processGroup : templateContents.getProcessGroups()) {
            verifyControllerServicesInSnippet(processGroup.getContents(), supportedTypes);
        }
    }
}
/**
 * Throws IllegalStateException when the bundle named by the DTO is not among the installed
 * bundle coordinates for the component's type.
 */
private void verifyBundleInSnippet(final BundleDTO requiredBundle, final Set<BundleCoordinate> supportedBundles) {
    final BundleCoordinate requiredCoordinate =
            new BundleCoordinate(requiredBundle.getGroup(), requiredBundle.getArtifact(), requiredBundle.getVersion());
    if (supportedBundles.contains(requiredCoordinate)) {
        return;
    }
    throw new IllegalStateException("Unsupported bundle: " + requiredCoordinate);
}
/**
 * Recursively verifies that every Processor in the snippet specifies a bundle, has a known
 * type, and references an installed bundle for that type.
 */
private void verifyProcessorsInSnippet(final FlowSnippetDTO templateContents, final Map<String, Set<BundleCoordinate>> supportedTypes) {
    if (templateContents.getProcessors() != null) {
        for (final ProcessorDTO processor : templateContents.getProcessors()) {
            if (processor.getBundle() == null) {
                throw new IllegalArgumentException("Processor bundle must be specified.");
            }
            if (!supportedTypes.containsKey(processor.getType())) {
                throw new IllegalStateException("Invalid Processor Type: " + processor.getType());
            }
            verifyBundleInSnippet(processor.getBundle(), supportedTypes.get(processor.getType()));
        }
    }

    if (templateContents.getProcessGroups() != null) {
        for (final ProcessGroupDTO processGroup : templateContents.getProcessGroups()) {
            verifyProcessorsInSnippet(processGroup.getContents(), supportedTypes);
        }
    }
}
public void instantiate(final FlowManager flowManager, final FlowController flowController, final ProcessGroup group, final boolean topLevel) {
//
// Instantiate Controller Services
//
final List<ControllerServiceNode> serviceNodes = new ArrayList<>();
try {
for (final ControllerServiceDTO controllerServiceDTO : dto.getControllerServices()) {
final BundleCoordinate bundleCoordinate = BundleUtils.getBundle(extensionManager, controllerServiceDTO.getType(), controllerServiceDTO.getBundle());
final ControllerServiceNode serviceNode = flowManager.createControllerService(controllerServiceDTO.getType(), controllerServiceDTO.getId(),
bundleCoordinate, Collections.emptySet(), true, true, null);
serviceNode.pauseValidationTrigger();
serviceNodes.add(serviceNode);
serviceNode.setAnnotationData(controllerServiceDTO.getAnnotationData());
serviceNode.setComments(controllerServiceDTO.getComments());
serviceNode.setName(controllerServiceDTO.getName());
if (controllerServiceDTO.getBulletinLevel() != null) {
serviceNode.setBulletinLevel(LogLevel.valueOf(controllerServiceDTO.getBulletinLevel()));
} else {
// this situation exists for backward compatibility with nifi 1.16 and earlier where controller services do not have bulletinLevels set in flow.xml/flow.json
// and bulletinLevels are at the WARN level by default
serviceNode.setBulletinLevel(LogLevel.WARN);
}
if (!topLevel) {
serviceNode.setVersionedComponentId(controllerServiceDTO.getVersionedComponentId());
}
group.addControllerService(serviceNode);
}
// configure controller services. We do this after creating all of them in case 1 service
// references another service.
for (final ControllerServiceDTO controllerServiceDTO : dto.getControllerServices()) {
final String serviceId = controllerServiceDTO.getId();
final ControllerServiceNode serviceNode = flowManager.getControllerServiceNode(serviceId);
final Set<String> sensitiveDynamicPropertyNames = controllerServiceDTO.getSensitiveDynamicPropertyNames();
serviceNode.setProperties(controllerServiceDTO.getProperties(), false, sensitiveDynamicPropertyNames == null ? Collections.emptySet() : sensitiveDynamicPropertyNames);
}
} finally {
serviceNodes.forEach(ControllerServiceNode::resumeValidationTrigger);
}
//
// Instantiate the labels
//
for (final LabelDTO labelDTO : dto.getLabels()) {
final Label label = flowManager.createLabel(labelDTO.getId(), labelDTO.getLabel());
label.setPosition(toPosition(labelDTO.getPosition()));
if (labelDTO.getWidth() != null && labelDTO.getHeight() != null) {
label.setSize(new Size(labelDTO.getWidth(), labelDTO.getHeight()));
}
label.setStyle(labelDTO.getStyle());
if (labelDTO.getzIndex() != null) {
label.setZIndex(label.getZIndex());
}
if (!topLevel) {
label.setVersionedComponentId(labelDTO.getVersionedComponentId());
}
group.addLabel(label);
}
// Instantiate the funnels
for (final FunnelDTO funnelDTO : dto.getFunnels()) {
final Funnel funnel = flowManager.createFunnel(funnelDTO.getId());
funnel.setPosition(toPosition(funnelDTO.getPosition()));
if (!topLevel) {
funnel.setVersionedComponentId(funnelDTO.getVersionedComponentId());
}
group.addFunnel(funnel);
}
//
// Instantiate Input Ports & Output Ports
//
for (final PortDTO portDTO : dto.getInputPorts()) {
final Port inputPort;
if (group.isRootGroup() || Boolean.TRUE.equals(portDTO.getAllowRemoteAccess())) {
final String portName = generatePublicInputPortName(flowManager, portDTO.getName());
inputPort = flowManager.createPublicInputPort(portDTO.getId(), portName);
} else {
inputPort = flowManager.createLocalInputPort(portDTO.getId(), portDTO.getName());
}
if (!topLevel) {
inputPort.setVersionedComponentId(portDTO.getVersionedComponentId());
}
inputPort.setPosition(toPosition(portDTO.getPosition()));
inputPort.setProcessGroup(group);
inputPort.setMaxConcurrentTasks(portDTO.getConcurrentlySchedulableTaskCount());
inputPort.setComments(portDTO.getComments());
if (portDTO.getState().equals(ScheduledState.DISABLED.toString())) {
inputPort.disable();
}
group.addInputPort(inputPort);
}
for (final PortDTO portDTO : dto.getOutputPorts()) {
final Port outputPort;
if (group.isRootGroup() || Boolean.TRUE.equals(portDTO.getAllowRemoteAccess())) {
final String portName = generatePublicOutputPortName(flowManager, portDTO.getName());
outputPort = flowManager.createPublicOutputPort(portDTO.getId(), portName);
} else {
outputPort = flowManager.createLocalOutputPort(portDTO.getId(), portDTO.getName());
}
if (!topLevel) {
outputPort.setVersionedComponentId(portDTO.getVersionedComponentId());
}
outputPort.setPosition(toPosition(portDTO.getPosition()));
outputPort.setProcessGroup(group);
outputPort.setMaxConcurrentTasks(portDTO.getConcurrentlySchedulableTaskCount());
outputPort.setComments(portDTO.getComments());
if (portDTO.getState().equals(ScheduledState.DISABLED.toString())) {
outputPort.disable();
}
group.addOutputPort(outputPort);
}
//
// Instantiate the processors
//
for (final ProcessorDTO processorDTO : dto.getProcessors()) {
final BundleCoordinate bundleCoordinate = BundleUtils.getBundle(extensionManager, processorDTO.getType(), processorDTO.getBundle());
final ProcessorNode procNode = flowManager.createProcessor(processorDTO.getType(), processorDTO.getId(), bundleCoordinate);
procNode.pauseValidationTrigger();
try {
procNode.setPosition(toPosition(processorDTO.getPosition()));
procNode.setProcessGroup(group);
if (!topLevel) {
procNode.setVersionedComponentId(processorDTO.getVersionedComponentId());
}
final ProcessorConfigDTO config = processorDTO.getConfig();
procNode.setComments(config.getComments());
if (config.isLossTolerant() != null) {
procNode.setLossTolerant(config.isLossTolerant());
}
procNode.setName(processorDTO.getName());
procNode.setYieldPeriod(config.getYieldDuration());
procNode.setPenalizationPeriod(config.getPenaltyDuration());
procNode.setBulletinLevel(LogLevel.valueOf(config.getBulletinLevel()));
procNode.setAnnotationData(config.getAnnotationData());
procNode.setRetryCount(config.getRetryCount());
procNode.setRetriedRelationships(config.getRetriedRelationships());
if (config.getBackoffMechanism() != null) {
procNode.setBackoffMechanism(BackoffMechanism.valueOf(config.getBackoffMechanism()));
}
procNode.setMaxBackoffPeriod(config.getMaxBackoffPeriod());
procNode.setStyle(processorDTO.getStyle());
if (config.getRunDurationMillis() != null) {
procNode.setRunDuration(config.getRunDurationMillis(), TimeUnit.MILLISECONDS);
}
if (config.getSchedulingStrategy() != null) {
procNode.setSchedulingStrategy(SchedulingStrategy.valueOf(config.getSchedulingStrategy()));
}
if (config.getExecutionNode() != null) {
procNode.setExecutionNode(ExecutionNode.valueOf(config.getExecutionNode()));
}
if (processorDTO.getState().equals(ScheduledState.DISABLED.toString())) {
procNode.disable();
}
// ensure that the scheduling strategy is set prior to these values
procNode.setMaxConcurrentTasks(config.getConcurrentlySchedulableTaskCount());
procNode.setSchedulingPeriod(config.getSchedulingPeriod());
final Set<Relationship> relationships = new HashSet<>();
if (processorDTO.getRelationships() != null) {
for (final RelationshipDTO rel : processorDTO.getRelationships()) {
if (rel.isAutoTerminate()) {
relationships.add(procNode.getRelationship(rel.getName()));
}
}
procNode.setAutoTerminatedRelationships(relationships);
}
// We need to add the processor to the ProcessGroup before calling ProcessorNode.setProperties. This will notify the FlowManager that the Processor
// has been added to the flow, which is important before calling ProcessorNode.setProperties, since #setProperties may call methods that result in looking
// up a Controller Service (such as #getClassloaderIsolationKey). The Processor must be registered with the FlowManager and its parent Process Group
// before that can happen, in order to ensure that it has access to any referenced Controller Service.
group.addProcessor(procNode);
if (config.getProperties() != null) {
final Set<String> sensitiveDynamicPropertyNames = config.getSensitiveDynamicPropertyNames();
procNode.setProperties(config.getProperties(), false, sensitiveDynamicPropertyNames == null ? Collections.emptySet() : sensitiveDynamicPropertyNames);
}
// Notify the processor node that the configuration (properties, e.g.) has been restored
final Class<?> componentClass = procNode.getProcessor() == null ? null : procNode.getProcessor().getClass();
final StandardProcessContext processContext = new StandardProcessContext(procNode, flowController.getControllerServiceProvider(),
flowController.getStateManagerProvider().getStateManager(procNode.getProcessor().getIdentifier(), componentClass), () -> false, flowController);
procNode.onConfigurationRestored(processContext);
} finally {
procNode.resumeValidationTrigger();
}
}
//
// Instantiate Remote Process Groups
//
for (final RemoteProcessGroupDTO remoteGroupDTO : dto.getRemoteProcessGroups()) {
final RemoteProcessGroup remoteGroup = flowManager.createRemoteProcessGroup(remoteGroupDTO.getId(), remoteGroupDTO.getTargetUris());
remoteGroup.setComments(remoteGroupDTO.getComments());
remoteGroup.setPosition(toPosition(remoteGroupDTO.getPosition()));
remoteGroup.setCommunicationsTimeout(remoteGroupDTO.getCommunicationsTimeout());
remoteGroup.setYieldDuration(remoteGroupDTO.getYieldDuration());
if (!topLevel) {
remoteGroup.setVersionedComponentId(remoteGroupDTO.getVersionedComponentId());
}
if (remoteGroupDTO.getTransportProtocol() == null) {
remoteGroup.setTransportProtocol(SiteToSiteTransportProtocol.RAW);
} else {
remoteGroup.setTransportProtocol(SiteToSiteTransportProtocol.valueOf(remoteGroupDTO.getTransportProtocol()));
}
remoteGroup.setProxyHost(remoteGroupDTO.getProxyHost());
remoteGroup.setProxyPort(remoteGroupDTO.getProxyPort());
remoteGroup.setProxyUser(remoteGroupDTO.getProxyUser());
remoteGroup.setProxyPassword(remoteGroupDTO.getProxyPassword());
remoteGroup.setProcessGroup(group);
// set the input/output ports
if (remoteGroupDTO.getContents() != null) {
final RemoteProcessGroupContentsDTO contents = remoteGroupDTO.getContents();
// ensure there are input ports
if (contents.getInputPorts() != null) {
remoteGroup.setInputPorts(convertRemotePort(contents.getInputPorts()), false);
}
// ensure there are output ports
if (contents.getOutputPorts() != null) {
remoteGroup.setOutputPorts(convertRemotePort(contents.getOutputPorts()), false);
}
}
group.addRemoteProcessGroup(remoteGroup);
}
//
// Instantiate ProcessGroups
//
for (final ProcessGroupDTO groupDTO : dto.getProcessGroups()) {
final ProcessGroup childGroup = flowManager.createProcessGroup(groupDTO.getId());
childGroup.setParent(group);
childGroup.setPosition(toPosition(groupDTO.getPosition()));
childGroup.setComments(groupDTO.getComments());
childGroup.setName(groupDTO.getName());
childGroup.setExecutionEngine(ExecutionEngine.valueOf(groupDTO.getExecutionEngine()));
childGroup.setStatelessFlowTimeout(groupDTO.getStatelessFlowTimeout());
childGroup.setMaxConcurrentTasks(groupDTO.getMaxConcurrentTasks());
final String flowfileConcurrentName = groupDTO.getFlowfileConcurrency();
if (flowfileConcurrentName != null) {
childGroup.setFlowFileConcurrency(FlowFileConcurrency.valueOf(flowfileConcurrentName));
}
final String outboundPolicyName = groupDTO.getFlowfileOutboundPolicy();
if (outboundPolicyName != null) {
childGroup.setFlowFileOutboundPolicy(FlowFileOutboundPolicy.valueOf(outboundPolicyName));
}
final ParameterContextReferenceEntity parameterContextReference = groupDTO.getParameterContext();
if (parameterContextReference != null) {
final ParameterContext parameterContext = flowManager.getParameterContextManager().getParameterContext(parameterContextReference.getId());
if (parameterContext != null) {
childGroup.setParameterContext(parameterContext);
}
}
final String defaultFlowFileExpiration = groupDTO.getDefaultFlowFileExpiration();
if (defaultFlowFileExpiration != null) {
childGroup.setDefaultFlowFileExpiration(defaultFlowFileExpiration);
}
final Long defaultBackPressureObjectThreshold = groupDTO.getDefaultBackPressureObjectThreshold();
if (defaultBackPressureObjectThreshold != null) {
childGroup.setDefaultBackPressureObjectThreshold(defaultBackPressureObjectThreshold);
}
final String defaultBackPressureDataSizeThreshold = groupDTO.getDefaultBackPressureDataSizeThreshold();
if (defaultBackPressureDataSizeThreshold != null) {
childGroup.setDefaultBackPressureDataSizeThreshold(defaultBackPressureDataSizeThreshold);
}
final String logFileSuffix = groupDTO.getLogFileSuffix();
if (logFileSuffix != null) {
childGroup.setLogFileSuffix(logFileSuffix);
}
// If this Process Group is 'top level' then we do not set versioned component ID's.
// We do this only if this component is the child of a Versioned Component.
if (!topLevel) {
childGroup.setVersionedComponentId(groupDTO.getVersionedComponentId());
}
group.addProcessGroup(childGroup);
final FlowSnippetDTO contents = groupDTO.getContents();
// we want this to be recursive, so we will create a new template that contains only
// the contents of this child group and recursively call ourselves.
final FlowSnippetDTO childTemplateDTO = new FlowSnippetDTO();
childTemplateDTO.setConnections(contents.getConnections());
childTemplateDTO.setInputPorts(contents.getInputPorts());
childTemplateDTO.setLabels(contents.getLabels());
childTemplateDTO.setOutputPorts(contents.getOutputPorts());
childTemplateDTO.setProcessGroups(contents.getProcessGroups());
childTemplateDTO.setProcessors(contents.getProcessors());
childTemplateDTO.setFunnels(contents.getFunnels());
childTemplateDTO.setRemoteProcessGroups(contents.getRemoteProcessGroups());
childTemplateDTO.setControllerServices(contents.getControllerServices());
final StandardFlowSnippet childSnippet = new StandardFlowSnippet(childTemplateDTO, extensionManager);
childSnippet.instantiate(flowManager, flowController, childGroup, false);
if (groupDTO.getVersionControlInformation() != null) {
final VersionControlInformation vci = StandardVersionControlInformation.Builder
.fromDto(groupDTO.getVersionControlInformation())
.build();
childGroup.setVersionControlInformation(vci, Collections.emptyMap());
}
}
//
// Instantiate Connections
//
for (final ConnectionDTO connectionDTO : dto.getConnections()) {
final ConnectableDTO sourceDTO = connectionDTO.getSource();
final ConnectableDTO destinationDTO = connectionDTO.getDestination();
final Connectable source;
final Connectable destination;
// Locate the source and destination connectable. If this is a remote port we need to locate the remote process groups. Otherwise, we need to
// find the connectable given its parent group.
//
// NOTE: (getConnectable returns ANY connectable, when the parent is not this group only input ports or output ports should be returned. If something
// other than a port is returned, an exception will be thrown when adding the connection below.)
// See if the source connectable is a remote port
if (ConnectableType.REMOTE_OUTPUT_PORT.name().equals(sourceDTO.getType())) {
final RemoteProcessGroup remoteGroup = group.getRemoteProcessGroup(sourceDTO.getGroupId());
source = remoteGroup.getOutputPort(sourceDTO.getId());
} else {
final ProcessGroup sourceGroup = getConnectableParent(group, sourceDTO.getGroupId(), flowManager);
source = sourceGroup.getConnectable(sourceDTO.getId());
}
// see if the destination connectable is a remote port
if (ConnectableType.REMOTE_INPUT_PORT.name().equals(destinationDTO.getType())) {
final RemoteProcessGroup remoteGroup = group.getRemoteProcessGroup(destinationDTO.getGroupId());
destination = remoteGroup.getInputPort(destinationDTO.getId());
} else {
final ProcessGroup destinationGroup = getConnectableParent(group, destinationDTO.getGroupId(), flowManager);
destination = destinationGroup.getConnectable(destinationDTO.getId());
}
// determine the selection relationships for this connection
final Set<String> relationships = new HashSet<>();
if (connectionDTO.getSelectedRelationships() != null) {
relationships.addAll(connectionDTO.getSelectedRelationships());
}
final Connection connection = flowManager.createConnection(connectionDTO.getId(), connectionDTO.getName(), source, destination, relationships);
if (!topLevel) {
connection.setVersionedComponentId(connectionDTO.getVersionedComponentId());
}
if (connectionDTO.getzIndex() != null) {
connection.setZIndex(connection.getZIndex());
}
if (connectionDTO.getBends() != null) {
final List<Position> bendPoints = new ArrayList<>();
for (final PositionDTO bend : connectionDTO.getBends()) {
bendPoints.add(new Position(bend.getX(), bend.getY()));
}
connection.setBendPoints(bendPoints);
}
final FlowFileQueue queue = connection.getFlowFileQueue();
queue.setBackPressureDataSizeThreshold(connectionDTO.getBackPressureDataSizeThreshold());
queue.setBackPressureObjectThreshold(connectionDTO.getBackPressureObjectThreshold());
queue.setFlowFileExpiration(connectionDTO.getFlowFileExpiration());
final List<String> prioritizers = connectionDTO.getPrioritizers();
if (prioritizers != null) {
final List<String> newPrioritizersClasses = new ArrayList<>(prioritizers);
final List<FlowFilePrioritizer> newPrioritizers = new ArrayList<>();
for (final String className : newPrioritizersClasses) {
try {
newPrioritizers.add(flowManager.createPrioritizer(className));
} catch (final ClassNotFoundException | InstantiationException | IllegalAccessException e) {
throw new IllegalArgumentException("Unable to set prioritizer " + className + ": " + e);
}
}
queue.setPriorities(newPrioritizers);
}
final String loadBalanceStrategyName = connectionDTO.getLoadBalanceStrategy();
if (loadBalanceStrategyName != null) {
final LoadBalanceStrategy loadBalanceStrategy = LoadBalanceStrategy.valueOf(loadBalanceStrategyName);
final String partitioningAttribute = connectionDTO.getLoadBalancePartitionAttribute();
queue.setLoadBalanceStrategy(loadBalanceStrategy, partitioningAttribute);
}
connection.setProcessGroup(group);
group.addConnection(connection);
}
}
private String generatePublicInputPortName(final FlowManager flowManager, final String proposedName) {
final Optional<Port> existingPort = flowManager.getPublicInputPort(proposedName);
if (existingPort.isPresent()) {
return generatePublicInputPortName(flowManager, "Copy of " + proposedName);
} else {
return proposedName;
}
}
private String generatePublicOutputPortName(final FlowManager flowManager, final String proposedName) {
final Optional<Port> existingPort = flowManager.getPublicOutputPort(proposedName);
if (existingPort.isPresent()) {
return generatePublicOutputPortName(flowManager, "Copy of " + proposedName);
} else {
return proposedName;
}
}
private ProcessGroup getConnectableParent(final ProcessGroup group, final String parentGroupId, final FlowManager flowManager) {
if (flowManager.areGroupsSame(group.getIdentifier(), parentGroupId)) {
return group;
} else {
return group.getProcessGroup(parentGroupId);
}
}
    /** Converts a Position DTO into the internal {@code Position} value. */
    private Position toPosition(final PositionDTO dto) {
        return new Position(dto.getX(), dto.getY());
    }
/**
* Converts a set of ports into a set of remote process group ports.
*
* @param ports ports
* @return group descriptors
*/
private Set<RemoteProcessGroupPortDescriptor> convertRemotePort(final Set<RemoteProcessGroupPortDTO> ports) {
Set<RemoteProcessGroupPortDescriptor> remotePorts = null;
if (ports != null) {
remotePorts = new LinkedHashSet<>(ports.size());
for (final RemoteProcessGroupPortDTO port : ports) {
final StandardRemoteProcessGroupPortDescriptor descriptor = new StandardRemoteProcessGroupPortDescriptor();
descriptor.setId(port.getId());
descriptor.setVersionedComponentId(port.getVersionedComponentId());
descriptor.setTargetId(port.getTargetId());
descriptor.setName(port.getName());
descriptor.setComments(port.getComments());
descriptor.setTargetRunning(port.isTargetRunning());
descriptor.setConnected(port.isConnected());
descriptor.setConcurrentlySchedulableTaskCount(port.getConcurrentlySchedulableTaskCount());
descriptor.setTransmitting(port.isTransmitting());
descriptor.setUseCompression(port.getUseCompression());
final BatchSettingsDTO batchSettings = port.getBatchSettings();
if (batchSettings != null) {
descriptor.setBatchCount(batchSettings.getCount());
descriptor.setBatchSize(batchSettings.getSize());
descriptor.setBatchDuration(batchSettings.getDuration());
}
remotePorts.add(descriptor);
}
}
return remotePorts;
}
}
|
apache/flink | 36,338 | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/hashtable/LongHashPartition.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.hashtable;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;
import org.apache.flink.core.memory.SeekableDataInputView;
import org.apache.flink.runtime.io.disk.iomanager.AbstractChannelWriterOutputView;
import org.apache.flink.runtime.io.disk.iomanager.BlockChannelWriter;
import org.apache.flink.runtime.io.disk.iomanager.BulkBlockChannelReader;
import org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView;
import org.apache.flink.runtime.io.disk.iomanager.FileIOChannel;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.memory.AbstractPagedInputView;
import org.apache.flink.runtime.memory.AbstractPagedOutputView;
import org.apache.flink.table.data.binary.BinaryRowData;
import org.apache.flink.table.runtime.typeutils.BinaryRowDataSerializer;
import org.apache.flink.table.runtime.util.FileChannelUtil;
import org.apache.flink.table.runtime.util.LazyMemorySegmentPool;
import org.apache.flink.table.runtime.util.RowIterator;
import org.apache.flink.util.MathUtils;
import org.apache.flink.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.EOFException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import static org.apache.flink.table.runtime.hashtable.BaseHybridHashTable.partitionLevelHash;
import static org.apache.flink.table.runtime.hashtable.LongHybridHashTable.hashLong;
import static org.apache.flink.util.Preconditions.checkArgument;
/**
* Partition for {@link LongHybridHashTable}.
*
* <p>The layout of the buckets inside a memory segment is as follows:
*
* <p>Hash mode: +----------------------------- Bucket area ---------------------------- | long key
* (8 bytes) | address (8 bytes) | | long key (8 bytes) | address (8 bytes) | | long key (8 bytes) |
* address (8 bytes) | | ... +----------------------------- Data area -------------------------- |
* size & address of next row with the same key (8bytes) | binary row | | size & address of next row
* with the same key (8bytes) | binary row | | size & address of next row with the same key (8bytes)
* | binary row | | ...
*
* <p>Dense mode: +----------------------------- Bucket area ---------------------------- | address1
* (8 bytes) | address2 (8 bytes) | address3 (8 bytes) | ... Directly addressed by the index of the
* corresponding array of key values.
*/
public class LongHashPartition extends AbstractPagedInputView implements SeekableDataInputView {
    private static final Logger LOG = LoggerFactory.getLogger(LongHashPartition.class);

    // The number of bits for size in address
    private static final int SIZE_BITS = 28;
    private static final int SIZE_MASK = 0xfffffff;

    // bucket element size in sparse mode: long key (8 bytes) + address pointer (8 bytes)
    private static final int SPARSE_BUCKET_ELEMENT_SIZE_IN_BYTES = 16;

    // sentinel marking an empty bucket slot / end of a record chain
    static final long INVALID_ADDRESS = 0x00000FFFFFFFFFL;

    private final LongHybridHashTable longTable;

    // segment size related properties
    private final int segmentSize;
    private final int segmentSizeBits;
    private final int segmentSizeMask;

    private int partitionNum;
    private final BinaryRowDataSerializer buildSideSerializer;
    // reused row instance that MatchIterator points at record bytes (no copies)
    private final BinaryRowData buildReuseRow;
    private int recursionLevel;

    // The minimum key
    private long minKey = Long.MAX_VALUE;
    // The maximum key
    private long maxKey = Long.MIN_VALUE;

    // The bucket area for this partition
    private MemorySegment[] buckets;
    private int numBuckets;
    private int numBucketsMask;

    // The in-memory data area for this partition
    private MemorySegment[] partitionBuffers;
    private int finalBufferLimit;
    private int currentBufferNum;

    private BuildSideBuffer buildSideWriteBuffer;
    AbstractChannelWriterOutputView probeSideBuffer;
    long probeSideRecordCounter; // number of probe-side records in this partition

    // The number of unique keys.
    private long numKeys;
    // single shared iterator instance, repositioned by valueIter()
    private final MatchIterator iterator;

    // the channel writer for the build side, if partition is spilled
    private BlockChannelWriter<MemorySegment> buildSideChannel;

    // number of build-side records in this partition
    private long buildSideRecordCounter;

    // valid byte count of the probe side's last spilled segment
    int probeNumBytesInLastSeg;
    /** Entrance 1: Init LongHashPartition for new insert and search. */
    LongHashPartition(
            LongHybridHashTable longTable,
            int partitionNum,
            BinaryRowDataSerializer buildSideSerializer,
            double estimatedRowCount,
            int maxSegs,
            int recursionLevel) {
        // size the bucket area from the estimated row count, then delegate to entrance 2
        // with no pre-existing data buffers
        this(
                longTable,
                partitionNum,
                buildSideSerializer,
                getBucketBuffersByRowCount((long) estimatedRowCount, maxSegs, longTable.pageSize()),
                recursionLevel,
                null,
                0);
        // fresh write buffer backed by one segment from the table's pool
        this.buildSideWriteBuffer = new BuildSideBuffer(longTable.nextSegment());
    }
    /**
     * Entrance 2: build table from spilled partition when the partition fits entirely into main
     * memory.
     */
    LongHashPartition(
            LongHybridHashTable longTable,
            int partitionNum,
            BinaryRowDataSerializer buildSideSerializer,
            int bucketNumSegs,
            int recursionLevel,
            List<MemorySegment> buffers,
            int lastSegmentLimit) {
        this(longTable, buildSideSerializer, listToArray(buffers));
        this.partitionNum = partitionNum;
        this.recursionLevel = recursionLevel;
        // 16 bytes per bucket slot; round down to a power of two so mask-based lookup works
        int numBuckets = MathUtils.roundDownToPowerOf2(segmentSize / 16 * bucketNumSegs);
        MemorySegment[] buckets = new MemorySegment[bucketNumSegs];
        for (int i = 0; i < bucketNumSegs; i++) {
            buckets[i] = longTable.nextSegment();
        }
        setNewBuckets(buckets, numBuckets);
        // only the last data segment may be partially filled
        this.finalBufferLimit = lastSegmentLimit;
    }
    /**
     * Entrance 3: dense mode for just data search (bucket in LongHybridHashTable of dense mode).
     */
    LongHashPartition(
            LongHybridHashTable longTable,
            BinaryRowDataSerializer buildSideSerializer,
            MemorySegment[] partitionBuffers) {
        super(0);
        this.longTable = longTable;
        this.buildSideSerializer = buildSideSerializer;
        this.buildReuseRow = buildSideSerializer.createInstance();
        this.segmentSize = longTable.pageSize();
        // bucket slots are 16 bytes, so the page size must be a multiple of 16
        Preconditions.checkArgument(segmentSize % 16 == 0);
        this.partitionBuffers = partitionBuffers;
        // log2strict also enforces that segmentSize is a power of two
        this.segmentSizeBits = MathUtils.log2strict(segmentSize);
        this.segmentSizeMask = segmentSize - 1;
        this.finalBufferLimit = segmentSize;
        this.iterator = new MatchIterator();
    }
private static MemorySegment[] listToArray(List<MemorySegment> list) {
if (list != null) {
return list.toArray(new MemorySegment[0]);
}
return null;
}
private static int getBucketBuffersByRowCount(long rowCount, int maxSegs, int segmentSize) {
int minNumBuckets = (int) Math.ceil((rowCount / 0.5));
Preconditions.checkArgument(segmentSize % 16 == 0);
return MathUtils.roundDownToPowerOf2(
(int)
Math.max(
1,
Math.min(
maxSegs,
Math.ceil(((double) minNumBuckets) * 16 / segmentSize))));
}
    /**
     * Installs a fresh (empty) bucket area and resets the key count.
     *
     * <p>Every slot's address word is initialized to INVALID_ADDRESS so stale data is never
     * mistaken for a valid entry.
     *
     * @param buckets segments backing the new bucket area
     * @param numBuckets number of bucket slots; must be a power of two for mask-based lookup
     */
    private void setNewBuckets(MemorySegment[] buckets, int numBuckets) {
        for (MemorySegment segment : buckets) {
            for (int i = 0; i < segmentSize; i += 16) {
                // Maybe we don't need init key, cause always verify address
                segment.putLong(i, 0);
                segment.putLong(i + 8, INVALID_ADDRESS);
            }
        }
        this.buckets = buckets;
        checkArgument(MathUtils.isPowerOf2(numBuckets));
        this.numBuckets = numBuckets;
        this.numBucketsMask = numBuckets - 1;
        this.numKeys = 0;
    }
    /** Packs a data address (upper bits) and a record length (lower SIZE_BITS bits) into one long. */
    private static long toAddrAndLen(long address, int size) {
        return (address << SIZE_BITS) | size;
    }

    /** Extracts the address part of a packed (address, length) long. */
    private static long toAddress(long addrAndLen) {
        return addrAndLen >>> SIZE_BITS;
    }

    /** Extracts the length part of a packed (address, length) long. */
    private static int toLength(long addrAndLen) {
        return (int) (addrAndLen & SIZE_MASK);
    }

    /** Returns an iterator of BinaryRowData for multiple linked values. */
    MatchIterator valueIter(long address) {
        // repositions the single shared MatchIterator instance; not safe to hold two at once
        iterator.set(address);
        return iterator;
    }
    /**
     * Returns an iterator over all values for the given key; the iterator yields no rows when
     * the key is absent.
     */
    public MatchIterator get(long key, int hashCode) {
        int bucket = findBucket(hashCode);
        // each bucket slot is 16 bytes: (long key, long address)
        int bucketOffset = bucket << 4;
        MemorySegment segment = buckets[bucketOffset >>> segmentSizeBits];
        int segOffset = bucketOffset & segmentSizeMask;
        // linear probing: walk successive slots until the key or an empty slot is found
        while (true) {
            long address = segment.getLong(segOffset + 8);
            if (address != INVALID_ADDRESS) {
                if (segment.getLong(segOffset) == key) {
                    return valueIter(address);
                } else {
                    // occupied by a different key: advance one slot, wrapping via the mask
                    bucket = (bucket + 1) & numBucketsMask;
                    if (segOffset + 16 < segmentSize) {
                        segOffset += 16;
                    } else {
                        // crossed a segment boundary: recompute segment and offset
                        bucketOffset = bucket << 4;
                        segOffset = bucketOffset & segmentSizeMask;
                        segment = buckets[bucketOffset >>> segmentSizeBits];
                    }
                }
            } else {
                // empty slot: key is absent; iterator over INVALID_ADDRESS yields nothing
                return valueIter(INVALID_ADDRESS);
            }
        }
    }
/** Update the address in array for given key. */
private void updateIndex(
long key,
int hashCode,
long address,
int size,
MemorySegment dataSegment,
int currentPositionInSegment)
throws IOException {
assert (numKeys <= numBuckets / 2);
int bucketId = findBucket(hashCode);
// each bucket occupied 16 bytes (long key + long pointer to data address)
int bucketOffset = bucketId * SPARSE_BUCKET_ELEMENT_SIZE_IN_BYTES;
MemorySegment segment = buckets[bucketOffset >>> segmentSizeBits];
int segOffset = bucketOffset & segmentSizeMask;
long currAddress;
while (true) {
currAddress = segment.getLong(segOffset + 8);
if (segment.getLong(segOffset) != key && currAddress != INVALID_ADDRESS) {
// hash conflicts, the bucket is occupied by another key
// TODO test Conflict resolution:
// now: +1 +1 +1... cache friendly but more conflict, so we set factor to 0.5
// other1: +1 +2 +3... less conflict, factor can be 0.75
// other2: Secondary hashCode... less and less conflict, but need compute hash again
bucketId = (bucketId + 1) & numBucketsMask;
if (segOffset + SPARSE_BUCKET_ELEMENT_SIZE_IN_BYTES < segmentSize) {
// if the new bucket still in current segment, we only need to update offset
// within this segment
segOffset += SPARSE_BUCKET_ELEMENT_SIZE_IN_BYTES;
} else {
// otherwise, we should re-calculate segment and offset
bucketOffset = bucketId * 16;
segment = buckets[bucketOffset >>> segmentSizeBits];
segOffset = bucketOffset & segmentSizeMask;
}
} else {
break;
}
}
if (currAddress == INVALID_ADDRESS) {
// this is the first value for this key, put the address in array.
segment.putLong(segOffset, key);
segment.putLong(segOffset + 8, address);
numKeys += 1;
// dataSegment may be null if we only have to rehash bucket area
if (dataSegment != null) {
dataSegment.putLong(currentPositionInSegment, toAddrAndLen(INVALID_ADDRESS, size));
}
if (numKeys * 2 > numBuckets) {
resize();
}
} else {
// there are some values for this key, put the address in the front of them.
dataSegment.putLong(currentPositionInSegment, toAddrAndLen(currAddress, size));
segment.putLong(segOffset + 8, address);
}
}
    /** Maps a key hash to a bucket index via the partition-level hash and the power-of-two mask. */
    private int findBucket(int hash) {
        return partitionLevelHash(hash) & this.numBucketsMask;
    }
    /**
     * Doubles the bucket area and rehashes all existing entries into it.
     *
     * <p>If no memory is available, a partition is spilled; when the spilled partition turns out
     * to be this one, the resize is abandoned and the already-acquired segments are returned.
     */
    private void resize() throws IOException {
        MemorySegment[] oldBuckets = this.buckets;
        int oldNumBuckets = numBuckets;
        int newNumSegs = oldBuckets.length * 2;
        int newNumBuckets = MathUtils.roundDownToPowerOf2(newNumSegs * segmentSize / 16);
        // request new buckets.
        MemorySegment[] newBuckets = new MemorySegment[newNumSegs];
        for (int i = 0; i < newNumSegs; i++) {
            MemorySegment seg = longTable.getNextBuffer();
            if (seg == null) {
                final int spilledPart = longTable.spillPartition();
                if (spilledPart == partitionNum) {
                    // this bucket is no longer in-memory
                    // free new segments.
                    // NOTE(review): newBuckets may still contain null entries here; this assumes
                    // returnAll tolerates nulls — confirm against LongHybridHashTable.
                    longTable.returnAll(Arrays.asList(newBuckets));
                    return;
                }
                // a different partition spilled; its memory should now be available
                seg = longTable.getNextBuffer();
                if (seg == null) {
                    throw new RuntimeException(
                            "Bug in HybridHashJoin: No memory became available after spilling a partition.");
                }
            }
            newBuckets[i] = seg;
        }
        setNewBuckets(newBuckets, newNumBuckets);
        reHash(oldBuckets, oldNumBuckets);
    }
    /**
     * Re-inserts every occupied slot of the old bucket area into the freshly installed one.
     * Only the index is rebuilt; record headers in the data area are left untouched.
     */
    private void reHash(MemorySegment[] oldBuckets, int oldNumBuckets) throws IOException {
        long reHashStartTime = System.currentTimeMillis();
        int bucketOffset = 0;
        MemorySegment segment = oldBuckets[bucketOffset];
        int segOffset = 0;
        for (int i = 0; i < oldNumBuckets; i++) {
            long address = segment.getLong(segOffset + 8);
            if (address != INVALID_ADDRESS) {
                long key = segment.getLong(segOffset);
                // size/dataSegment/currentPositionInSegment should never be used.
                updateIndex(key, hashLong(key, recursionLevel), address, 0, null, 0);
            }
            // not last bucket, move to next.
            if (i != oldNumBuckets - 1) {
                if (segOffset + 16 < segmentSize) {
                    segOffset += 16;
                } else {
                    segment = oldBuckets[++bucketOffset];
                    segOffset = 0;
                }
            }
        }
        // old bucket segments go back to the table's pool
        longTable.returnAll(Arrays.asList(oldBuckets));
        LOG.info(
                "The rehash take {} ms for {} segments",
                (System.currentTimeMillis() - reHashStartTime),
                numBuckets);
    }
    public MemorySegment[] getBuckets() {
        return buckets;
    }

    /** Number of build-side blocks: in-memory buffer count, or the write buffer's block count. */
    int getBuildSideBlockCount() {
        return this.partitionBuffers == null
                ? this.buildSideWriteBuffer.getBlockCount()
                : this.partitionBuffers.length;
    }

    /** Number of probe-side blocks written, or -1 if no probe spill buffer exists. */
    int getProbeSideBlockCount() {
        return this.probeSideBuffer == null ? -1 : this.probeSideBuffer.getBlockCount();
    }

    BlockChannelWriter<MemorySegment> getBuildSideChannel() {
        return this.buildSideChannel;
    }

    int getPartitionNumber() {
        return this.partitionNum;
    }

    MemorySegment[] getPartitionBuffers() {
        return partitionBuffers;
    }

    int getRecursionLevel() {
        return this.recursionLevel;
    }

    /** Total segments held by this partition: data (or write buffer) plus the bucket area. */
    int getNumOccupiedMemorySegments() {
        // either the number of memory segments, or one for spilling
        final int numPartitionBuffers =
                this.partitionBuffers != null
                        ? this.partitionBuffers.length
                        : this.buildSideWriteBuffer.getNumOccupiedMemorySegments();
        return numPartitionBuffers + buckets.length;
    }
    /**
     * Spills this partition's build side to the given channel.
     *
     * <p>The current (likely partially filled) write block stays in memory; only the full blocks
     * are flushed.
     *
     * @return the number of memory segments freed by flushing the full blocks
     */
    int spillPartition(
            IOManager ioAccess,
            FileIOChannel.ID targetChannel,
            LinkedBlockingQueue<MemorySegment> bufferReturnQueue)
            throws IOException {
        // sanity checks
        if (!isInMemory()) {
            throw new RuntimeException(
                    "Bug in Hybrid Hash Join: "
                            + "Request to spill a partition that has already been spilled.");
        }
        if (getNumOccupiedMemorySegments() < 2) {
            throw new RuntimeException(
                    "Bug in Hybrid Hash Join: "
                            + "Request to spill a partition with less than two buffers.");
        }
        // create the channel block writer and spill the current buffers
        // that keep the build side buffers current block, as it is most likely not full, yet
        // we return the number of blocks that become available
        this.buildSideChannel =
                FileChannelUtil.createBlockChannelWriter(
                        ioAccess,
                        targetChannel,
                        bufferReturnQueue,
                        longTable.compressionEnabled(),
                        longTable.compressionCodecFactory(),
                        longTable.compressionBlockSize(),
                        segmentSize);
        return this.buildSideWriteBuffer.spill(this.buildSideChannel);
    }
    /**
     * After build phase.
     *
     * @return build spill return buffer, if have spilled, it returns the current write buffer,
     *     because it was used all the time in build phase, so it can only be returned at this time.
     */
    int finalizeBuildPhase(IOManager ioAccess, FileIOChannel.Enumerator probeChannelEnumerator)
            throws IOException {
        // remember how much of the last block is valid, then take ownership of the buffers
        this.finalBufferLimit = this.buildSideWriteBuffer.getCurrentPositionInSegment();
        this.partitionBuffers = this.buildSideWriteBuffer.close();
        if (!isInMemory()) {
            // close the channel.
            this.buildSideChannel.close();
            // prepare a spill buffer for probe-side records that hash into this partition
            this.probeSideBuffer =
                    FileChannelUtil.createOutputView(
                            ioAccess,
                            probeChannelEnumerator.next(),
                            longTable.compressionEnabled(),
                            longTable.compressionCodecFactory(),
                            longTable.compressionBlockSize(),
                            segmentSize);
            return 1;
        } else {
            return 0;
        }
    }
    /**
     * After the probe phase: releases in-memory resources; for spilled partitions, either
     * deletes the spill files (no probe records were written) or registers this partition for
     * recursive processing.
     */
    void finalizeProbePhase(List<LongHashPartition> spilledPartitions) throws IOException {
        if (isInMemory()) {
            releaseBuckets();
            longTable.returnAll(Arrays.asList(partitionBuffers));
            this.partitionBuffers = null;
        } else {
            if (this.probeSideRecordCounter == 0) {
                // delete the spill files
                this.probeSideBuffer.close();
                this.buildSideChannel.deleteChannel();
                this.probeSideBuffer.getChannel().deleteChannel();
            } else {
                // flush the last probe side buffer and register this partition as pending
                probeNumBytesInLastSeg = this.probeSideBuffer.close();
                spilledPartitions.add(this);
            }
        }
    }
    /** Creates an iterator over all records in the in-memory data area. */
    final PartitionIterator newPartitionIterator() {
        return new PartitionIterator();
    }

    /** Valid byte count of the last data segment (all earlier segments are full). */
    final int getLastSegmentLimit() {
        return this.finalBufferLimit;
    }
    // ------------------ PagedInputView for read --------------------

    /** Positions the read view at the packed pointer (segment number | in-segment offset). */
    @Override
    public void setReadPosition(long pointer) {
        final int bufferNum = (int) (pointer >>> this.segmentSizeBits);
        final int offset = (int) (pointer & segmentSizeMask);
        this.currentBufferNum = bufferNum;
        seekInput(
                this.partitionBuffers[bufferNum],
                offset,
                bufferNum < partitionBuffers.length - 1 ? segmentSize : finalBufferLimit);
    }

    @Override
    protected MemorySegment nextSegment(MemorySegment current) throws IOException {
        this.currentBufferNum++;
        if (this.currentBufferNum < this.partitionBuffers.length) {
            return this.partitionBuffers[this.currentBufferNum];
        } else {
            // no more data: signals end-of-input to the paged view
            throw new EOFException();
        }
    }

    @Override
    protected int getLimitForSegment(MemorySegment segment) {
        // only the last segment may be partially filled
        return segment == partitionBuffers[partitionBuffers.length - 1]
                ? finalBufferLimit
                : segmentSize;
    }
    /** True while the build side has not been spilled to disk. */
    boolean isInMemory() {
        return buildSideChannel == null;
    }

    /** Serializes a probe-side record into this (spilled) partition's probe buffer. */
    final void insertIntoProbeBuffer(BinaryRowDataSerializer probeSer, BinaryRowData record)
            throws IOException {
        probeSer.serialize(record, this.probeSideBuffer);
        this.probeSideRecordCounter++;
    }

    long getBuildSideRecordCount() {
        return buildSideRecordCounter;
    }

    long getMinKey() {
        return minKey;
    }

    long getMaxKey() {
        return maxKey;
    }

    /** Tracks the smallest and largest key inserted so far. */
    private void updateMinMax(long key) {
        if (key < minKey) {
            minKey = key;
        }
        if (key > maxKey) {
            maxKey = key;
        }
    }

    /** Records a build row already written to the data area: counts it and indexes its address. */
    void insertIntoBucket(long key, int hashCode, int size, long address) throws IOException {
        this.buildSideRecordCounter++;
        updateMinMax(key);
        // unpack (segment number | offset) to locate the record's 8-byte header
        final int bufferNum = (int) (address >>> this.segmentSizeBits);
        final int offset = (int) (address & (this.segmentSize - 1));
        updateIndex(key, hashCode, address, size, partitionBuffers[bufferNum], offset);
    }
    /**
     * Inserts a build-side row: writes it to the data area and indexes it, or — if this
     * partition has already spilled — just serializes it to the spill pages.
     */
    void insertIntoTable(long key, int hashCode, BinaryRowData row) throws IOException {
        this.buildSideRecordCounter++;
        updateMinMax(key);
        int sizeInBytes = row.getSizeInBytes();
        // the record length must fit into the 28-bit (SIZE_BITS) length field of the header
        if (sizeInBytes >= (1 << SIZE_BITS)) {
            throw new UnsupportedOperationException(
                    "Does not support row that is larger than 256M");
        }
        if (isInMemory()) {
            checkWriteAdvance();
            // after advance, we may run out memory and spill this partition, check still in memory
            // again
            if (isInMemory()) {
                updateIndex(
                        key,
                        hashCode,
                        buildSideWriteBuffer.getCurrentPointer(),
                        sizeInBytes,
                        buildSideWriteBuffer.getCurrentSegment(),
                        buildSideWriteBuffer.getCurrentPositionInSegment());
            } else {
                // spilled during advance: write an unlinked header, no index entry is kept
                buildSideWriteBuffer
                        .getCurrentSegment()
                        .putLong(
                                buildSideWriteBuffer.getCurrentPositionInSegment(),
                                toAddrAndLen(INVALID_ADDRESS, sizeInBytes));
            }
            buildSideWriteBuffer.skipBytesToWrite(8);
            if (row.getSegments().length == 1) {
                // fast path: row backed by a single segment, copy directly
                buildSideWriteBuffer.write(row.getSegments()[0], row.getOffset(), sizeInBytes);
            } else {
                BinaryRowDataSerializer.serializeWithoutLengthSlow(row, buildSideWriteBuffer);
            }
        } else {
            serializeToPages(row);
        }
    }
public void serializeToPages(BinaryRowData row) throws IOException {
int sizeInBytes = row.getSizeInBytes();
checkWriteAdvance();
buildSideWriteBuffer
.getCurrentSegment()
.putLong(
buildSideWriteBuffer.getCurrentPositionInSegment(),
toAddrAndLen(INVALID_ADDRESS, row.getSizeInBytes()));
buildSideWriteBuffer.skipBytesToWrite(8);
if (row.getSegments().length == 1) {
buildSideWriteBuffer.write(row.getSegments()[0], row.getOffset(), sizeInBytes);
} else {
BinaryRowDataSerializer.serializeWithoutLengthSlow(row, buildSideWriteBuffer);
}
}
void releaseBuckets() {
if (buckets != null) {
longTable.returnAll(Arrays.asList(buckets));
buckets = null;
}
}
// ------------------ PagedInputView for read end --------------------
/** Write Buffer. */
private class BuildSideBuffer extends AbstractPagedOutputView {
private final ArrayList<MemorySegment> targetList;
private int currentBlockNumber;
private BlockChannelWriter<MemorySegment> writer;
private BuildSideBuffer(MemorySegment segment) {
super(segment, segment.size(), 0);
this.targetList = new ArrayList<>();
}
@Override
protected MemorySegment nextSegment(MemorySegment current, int positionInCurrent)
throws IOException {
final MemorySegment next;
if (this.writer == null) {
// Must first add current segment:
// This may happen when you need to spill:
// A partition called nextSegment, can not get memory, need to spill, the result
// give itself to the spill, Since it is switching currentSeg, it is necessary
// to give the previous currSeg to spill.
this.targetList.add(current);
next = longTable.nextSegment();
} else {
this.writer.writeBlock(current);
try {
next = this.writer.getReturnQueue().take();
} catch (InterruptedException iex) {
throw new IOException(
"Hash Join Partition was interrupted while "
+ "grabbing a new write-behind buffer.");
}
}
this.currentBlockNumber++;
return next;
}
long getCurrentPointer() {
return (((long) this.currentBlockNumber) << segmentSizeBits)
+ getCurrentPositionInSegment();
}
int getBlockCount() {
return this.currentBlockNumber + 1;
}
int getNumOccupiedMemorySegments() {
// return the current segment + all filled segments
return this.targetList.size() + 1;
}
int spill(BlockChannelWriter<MemorySegment> writer) throws IOException {
this.writer = writer;
final int numSegments = this.targetList.size();
for (MemorySegment segment : this.targetList) {
this.writer.writeBlock(segment);
}
this.targetList.clear();
return numSegments;
}
MemorySegment[] close() throws IOException {
final MemorySegment current = getCurrentSegment();
if (current == null) {
throw new IllegalStateException(
"Illegal State in LongHashTable: "
+ "No current buffer when finalizing build side.");
}
clear();
if (this.writer == null) {
this.targetList.add(current);
MemorySegment[] buffers = this.targetList.toArray(new MemorySegment[0]);
this.targetList.clear();
return buffers;
} else {
writer.writeBlock(current);
return null;
}
}
}
    /** Iterator for probe match. */
    public class MatchIterator implements RowIterator<BinaryRowData> {

        // packed pointer to the next record header; INVALID_ADDRESS means exhausted
        private long address;

        public void set(long address) {
            this.address = address;
        }

        @Override
        public boolean advanceNext() {
            if (address != INVALID_ADDRESS) {
                setReadPosition(address);
                // header packs this record's length and the next record's address
                long addrAndLen = getCurrentSegment().getLong(getCurrentPositionInSegment());
                this.address = toAddress(addrAndLen);
                int size = toLength(addrAndLen);
                try {
                    skipBytesToRead(8);
                    // point the shared reuse row at the record bytes; no copy is made
                    buildSideSerializer.pointTo(size, buildReuseRow, LongHashPartition.this);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
                return true;
            }
            return false;
        }

        @Override
        public BinaryRowData getRow() {
            return buildReuseRow;
        }
    }
    /**
     * Releases every resource held by this partition: in-memory segments go back to the pool and
     * any spill channels are closed and deleted.
     */
    void clearAllMemory(LazyMemorySegmentPool pool) {
        // return current buffers from build side and probe side
        if (this.buildSideWriteBuffer != null) {
            if (this.buildSideWriteBuffer.getCurrentSegment() != null) {
                pool.returnPage(this.buildSideWriteBuffer.getCurrentSegment());
            }
            pool.returnAll(this.buildSideWriteBuffer.targetList);
            this.buildSideWriteBuffer.targetList.clear();
            this.buildSideWriteBuffer = null;
        }
        releaseBuckets();
        // return the partition buffers
        if (this.partitionBuffers != null) {
            pool.returnAll(Arrays.asList(this.partitionBuffers));
            this.partitionBuffers = null;
        }
        // clear the channels
        try {
            if (this.buildSideChannel != null) {
                this.buildSideChannel.close();
                this.buildSideChannel.deleteChannel();
            }
            if (this.probeSideBuffer != null) {
                this.probeSideBuffer.getChannel().closeAndDelete();
                this.probeSideBuffer = null;
            }
        } catch (IOException ioex) {
            throw new RuntimeException(
                    "Error deleting the partition files. "
                            + "Some temporary files might not be removed.",
                    ioex);
        }
    }
    /**
     * For spilled partition to rebuild index and hashcode when memory can store all the build side
     * data. (After bulk load to memory, see {@link BulkBlockChannelReader}).
     */
    final class PartitionIterator implements RowIterator<BinaryRowData> {

        // packed pointer of the record returned by the latest advanceNext()
        private long currentPointer;
        private BinaryRowData reuse;

        private PartitionIterator() {
            this.reuse = buildSideSerializer.createInstance();
            setReadPosition(0);
        }

        @Override
        public boolean advanceNext() {
            try {
                checkReadAdvance();
                int pos = getCurrentPositionInSegment();
                // remember where this record starts so callers can re-index it
                this.currentPointer = (((long) currentBufferNum) << segmentSizeBits) + pos;
                long addrAndLen = getCurrentSegment().getLong(pos);
                skipBytesToRead(8);
                buildSideSerializer.pointTo(toLength(addrAndLen), reuse, LongHashPartition.this);
                return true;
            } catch (EOFException e) {
                // reached the end of the data area
                return false;
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }

        long getPointer() {
            return this.currentPointer;
        }

        @Override
        public BinaryRowData getRow() {
            return this.reuse;
        }
    }
    /** Advances the write buffer when the next record header + fixed part would not fit. */
    private void checkWriteAdvance() throws IOException {
        if (shouldAdvance(
                buildSideWriteBuffer.getSegmentSize()
                        - buildSideWriteBuffer.getCurrentPositionInSegment(),
                buildSideSerializer)) {
            buildSideWriteBuffer.advance();
        }
    }

    /** Advances the read view when the next record header + fixed part cannot be in this segment. */
    private void checkReadAdvance() throws IOException {
        if (shouldAdvance(
                getCurrentSegmentLimit() - getCurrentPositionInSegment(), buildSideSerializer)) {
            advance();
        }
    }

    /** A record's 8-byte header plus fixed-length part never straddles a segment boundary. */
    private static boolean shouldAdvance(int available, BinaryRowDataSerializer serializer) {
        return available < 8 + serializer.getFixedLengthPartSize();
    }
    /**
     * Reads one record (written by {@link #serializeToPages}) from a spill view into {@code
     * reuse}, skipping the 8-byte header and copying the row bytes into a heap segment.
     */
    public static void deserializeFromPages(
            BinaryRowData reuse,
            ChannelReaderInputView inView,
            BinaryRowDataSerializer buildSideSerializer)
            throws IOException {
        if (shouldAdvance(
                inView.getCurrentSegmentLimit() - inView.getCurrentPositionInSegment(),
                buildSideSerializer)) {
            // mirror the writer's segment-advance rule so reader and writer stay aligned
            inView.advance();
        }
        MemorySegment segment = (reuse.getSegments() != null) ? reuse.getSegments()[0] : null;
        int length =
                toLength(inView.getCurrentSegment().getLong(inView.getCurrentPositionInSegment()));
        inView.skipBytesToRead(8);
        if (segment == null || segment.size() < length) {
            // grow (or create) the backing segment when the reused one is too small
            segment = MemorySegmentFactory.wrap(new byte[length]);
        }
        inView.readFully(segment.getHeapMemory(), 0, length);
        reuse.pointTo(segment, 0, length);
    }
/**
 * Copies every occupied entry of this partition's sparse bucket area into the given dense
 * bucket segments: key {@code k} is written to dense slot {@code k - globalMinKey}, and each
 * stored record address is re-based by {@code addressOffset}.
 *
 * @param denseBuckets  target segments, addressed as one contiguous array of 8-byte addresses
 * @param addressOffset value added to every record address written out
 * @param globalMinKey  smallest key across all partitions; defines dense slot 0
 */
void iteratorToDenseBucket(
        MemorySegment[] denseBuckets, long addressOffset, long globalMinKey) {
    int bucketOffset = 0;
    MemorySegment segment = buckets[bucketOffset];
    int segOffset = 0;
    for (int i = 0; i < numBuckets; i++) {
        // Each sparse bucket entry is 16 bytes: 8-byte key followed by 8-byte address.
        long address = segment.getLong(segOffset + 8);
        if (address != INVALID_ADDRESS) {
            long key = segment.getLong(segOffset);
            // Dense slot index is the key's distance from the global minimum key.
            long denseBucket = key - globalMinKey;
            // Slot index -> byte offset (8 bytes per dense slot), then split into
            // segment index and offset within that segment.
            long denseBucketOffset = denseBucket << 3;
            int denseSegIndex = (int) (denseBucketOffset >>> segmentSizeBits);
            int denseSegOffset = (int) (denseBucketOffset & segmentSizeMask);
            denseBuckets[denseSegIndex].putLong(denseSegOffset, address + addressOffset);
        }
        // not last bucket, move to next.
        if (i != numBuckets - 1) {
            if (segOffset + 16 < segmentSize) {
                segOffset += 16;
            } else {
                segment = buckets[++bucketOffset];
                segOffset = 0;
            }
        }
    }
}
/**
 * Walks every record header in this partition's data area and adds {@code addressOffset} to
 * each stored address in place, leaving record lengths unchanged. No-op when the offset is 0.
 * The walk terminates when the read position runs past the data (EOFException).
 */
void updateDenseAddressOffset(long addressOffset) {
    if (addressOffset != 0) {
        setReadPosition(0);
        while (true) {
            try {
                checkReadAdvance();
                // Each record starts with an 8-byte header packing address and length.
                long addrAndLen = getCurrentSegment().getLong(getCurrentPositionInSegment());
                long address = LongHashPartition.toAddress(addrAndLen);
                int len = LongHashPartition.toLength(addrAndLen);
                if (address != INVALID_ADDRESS) {
                    // Rewrite the header with the re-based address, preserving the length.
                    getCurrentSegment()
                            .putLong(
                                    getCurrentPositionInSegment(),
                                    LongHashPartition.toAddrAndLen(
                                            address + addressOffset, len));
                }
                // Skip over the header and the record payload to reach the next header.
                skipBytesToRead(8 + len);
            } catch (EOFException e) {
                // Reached the end of the partition data: all headers updated.
                break;
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }
}
}
|
apache/nifi | 36,446 | nifi-extension-bundles/nifi-extension-utils/nifi-git-flow-registry/src/main/java/org/apache/nifi/registry/flow/git/AbstractGitFlowRegistryClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.registry.flow.git;
import org.apache.nifi.components.DescribedValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.flow.ConnectableComponent;
import org.apache.nifi.flow.Position;
import org.apache.nifi.flow.VersionedComponent;
import org.apache.nifi.flow.VersionedConnection;
import org.apache.nifi.flow.VersionedFlowCoordinates;
import org.apache.nifi.flow.VersionedParameter;
import org.apache.nifi.flow.VersionedParameterContext;
import org.apache.nifi.flow.VersionedProcessGroup;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.registry.flow.AbstractFlowRegistryClient;
import org.apache.nifi.registry.flow.AuthorizationException;
import org.apache.nifi.registry.flow.BucketLocation;
import org.apache.nifi.registry.flow.FlowAlreadyExistsException;
import org.apache.nifi.registry.flow.FlowLocation;
import org.apache.nifi.registry.flow.FlowRegistryBranch;
import org.apache.nifi.registry.flow.FlowRegistryBucket;
import org.apache.nifi.registry.flow.FlowRegistryClientConfigurationContext;
import org.apache.nifi.registry.flow.FlowRegistryClientInitializationContext;
import org.apache.nifi.registry.flow.FlowRegistryException;
import org.apache.nifi.registry.flow.FlowRegistryPermissions;
import org.apache.nifi.registry.flow.FlowVersionLocation;
import org.apache.nifi.registry.flow.RegisterAction;
import org.apache.nifi.registry.flow.RegisteredFlow;
import org.apache.nifi.registry.flow.RegisteredFlowSnapshot;
import org.apache.nifi.registry.flow.RegisteredFlowSnapshotMetadata;
import org.apache.nifi.registry.flow.git.client.GitCommit;
import org.apache.nifi.registry.flow.git.client.GitCreateContentRequest;
import org.apache.nifi.registry.flow.git.client.GitRepositoryClient;
import org.apache.nifi.registry.flow.git.serialize.FlowSnapshotSerializer;
import org.apache.nifi.registry.flow.git.serialize.JacksonFlowSnapshotSerializer;
import org.apache.nifi.util.StringUtils;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
* Base class for git-based flow registry clients.
*/
public abstract class AbstractGitFlowRegistryClient extends AbstractFlowRegistryClient {
public static final PropertyDescriptor REPOSITORY_BRANCH = new PropertyDescriptor.Builder()
.name("Default Branch")
.description("The default branch to use for this client")
.addValidator(StandardValidators.NON_BLANK_VALIDATOR)
.defaultValue("main")
.required(true)
.build();
public static final PropertyDescriptor REPOSITORY_PATH = new PropertyDescriptor.Builder()
.name("Repository Path")
.description("The path in the repository that this client will use to store all data. " +
"If left blank, then the root of the repository will be used.")
.addValidator(StandardValidators.NON_BLANK_VALIDATOR)
.required(false)
.build();
public static final PropertyDescriptor DIRECTORY_FILTER_EXCLUDE = new PropertyDescriptor.Builder()
.name("Directory Filter Exclusion")
.description("Directories whose names match the given regular expression will be ignored "
+ "when listing buckets.")
.defaultValue("[.].*")
.addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR)
.required(true)
.build();
public static final PropertyDescriptor PARAMETER_CONTEXT_VALUES = new PropertyDescriptor.Builder()
.name("Parameter Context Values")
.description("Specifies what to do with parameter values when storing the versioned flow.")
.allowableValues(ParameterContextValuesStrategy.class)
.defaultValue(ParameterContextValuesStrategy.RETAIN)
.required(true)
.build();
static final String DEFAULT_BUCKET_NAME = "default";
static final String DEFAULT_BUCKET_KEEP_FILE_PATH = DEFAULT_BUCKET_NAME + "/.keep";
static final String DEFAULT_BUCKET_KEEP_FILE_CONTENT = "Do Not Delete";
static final String DEFAULT_BUCKET_KEEP_FILE_MESSAGE = "Creating default bucket";
static final String REGISTER_FLOW_MESSAGE_PREFIX = "Registering Flow";
static final String REGISTER_FLOW_MESSAGE_FORMAT = REGISTER_FLOW_MESSAGE_PREFIX + " [%s]";
static final String DEREGISTER_FLOW_MESSAGE_FORMAT = "Deregistering Flow [%s]";
static final String DEFAULT_FLOW_SNAPSHOT_MESSAGE_FORMAT = "Saving Flow Snapshot %s";
static final String SNAPSHOT_FILE_EXTENSION = ".json";
static final String SNAPSHOT_FILE_PATH_FORMAT = "%s/%s" + SNAPSHOT_FILE_EXTENSION;
static final String FLOW_CONTENTS_GROUP_ID = "flow-contents-group";
private volatile FlowSnapshotSerializer flowSnapshotSerializer;
private volatile GitRepositoryClient repositoryClient;
private volatile Pattern directoryExclusionPattern;
private final AtomicBoolean clientInitialized = new AtomicBoolean(false);
private volatile List<PropertyDescriptor> propertyDescriptors;
@Override
public void initialize(final FlowRegistryClientInitializationContext context) {
super.initialize(context);
final List<PropertyDescriptor> combinedPropertyDescriptors = new ArrayList<>(createPropertyDescriptors());
combinedPropertyDescriptors.add(REPOSITORY_BRANCH);
combinedPropertyDescriptors.add(REPOSITORY_PATH);
combinedPropertyDescriptors.add(DIRECTORY_FILTER_EXCLUDE);
combinedPropertyDescriptors.add(PARAMETER_CONTEXT_VALUES);
propertyDescriptors = Collections.unmodifiableList(combinedPropertyDescriptors);
flowSnapshotSerializer = createFlowSnapshotSerializer();
}
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return propertyDescriptors;
}
@Override
protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
final List<ValidationResult> results = new ArrayList<>(super.customValidate(validationContext));
final String repoPath = validationContext.getProperty(REPOSITORY_PATH).getValue();
if (repoPath != null && (repoPath.startsWith("/") || repoPath.endsWith("/"))) {
results.add(new ValidationResult.Builder()
.subject(REPOSITORY_PATH.getDisplayName())
.valid(false)
.explanation("Path can not start or end with /")
.build());
}
return results;
}
@Override
public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
super.onPropertyModified(descriptor, oldValue, newValue);
synchronized (this) {
invalidateClient();
}
}
@Override
public boolean isBranchingSupported(final FlowRegistryClientConfigurationContext context) {
return true;
}
@Override
public Set<FlowRegistryBranch> getBranches(final FlowRegistryClientConfigurationContext context) throws FlowRegistryException, IOException {
final GitRepositoryClient repositoryClient = getRepositoryClient(context);
verifyReadPermissions(repositoryClient);
return repositoryClient.getBranches().stream()
.map(branchName -> {
final FlowRegistryBranch flowRegistryBranch = new FlowRegistryBranch();
flowRegistryBranch.setName(branchName);
return flowRegistryBranch;
}).collect(Collectors.toSet());
}
@Override
public FlowRegistryBranch getDefaultBranch(final FlowRegistryClientConfigurationContext context) {
final FlowRegistryBranch defaultBranch = new FlowRegistryBranch();
defaultBranch.setName(context.getProperty(REPOSITORY_BRANCH).getValue());
return defaultBranch;
}
@Override
public Set<FlowRegistryBucket> getBuckets(final FlowRegistryClientConfigurationContext context, final String branch) throws IOException, FlowRegistryException {
final GitRepositoryClient repositoryClient = getRepositoryClient(context);
verifyReadPermissions(repositoryClient);
final Set<FlowRegistryBucket> buckets = repositoryClient.getTopLevelDirectoryNames(branch).stream()
.filter(bucketName -> !directoryExclusionPattern.matcher(bucketName).matches())
.map(bucketName -> createFlowRegistryBucket(repositoryClient, bucketName))
.collect(Collectors.toSet());
// if the repository has no top-level directories, then return a default bucket entry, this won't exist in the repository until the first time a flow is saved to it
return buckets.isEmpty() ? Set.of(createFlowRegistryBucket(repositoryClient, DEFAULT_BUCKET_NAME)) : buckets;
}
@Override
public FlowRegistryBucket getBucket(final FlowRegistryClientConfigurationContext context, final BucketLocation bucketLocation) throws FlowRegistryException, IOException {
final GitRepositoryClient repositoryClient = getRepositoryClient(context);
verifyReadPermissions(repositoryClient);
return createFlowRegistryBucket(repositoryClient, bucketLocation.getBucketId());
}
@Override
public RegisteredFlow registerFlow(final FlowRegistryClientConfigurationContext context, final RegisteredFlow flow) throws FlowRegistryException, IOException {
final GitRepositoryClient repositoryClient = getRepositoryClient(context);
verifyWritePermissions(repositoryClient);
final String branch = flow.getBranch();
final FlowLocation flowLocation = new FlowLocation(branch, flow.getBucketIdentifier(), flow.getIdentifier());
final String filePath = getSnapshotFilePath(flowLocation);
final String commitMessage = REGISTER_FLOW_MESSAGE_FORMAT.formatted(flow.getIdentifier());
final Optional<String> existingFileSha = repositoryClient.getContentSha(filePath, branch);
if (existingFileSha.isPresent()) {
throw new FlowAlreadyExistsException("Another flow is already registered at [" + filePath + "] on branch [" + branch + "]");
}
// Clear values we don't want in the json stored in Git
final String originalBucketId = flow.getBucketIdentifier();
flow.setBucketIdentifier(null);
flow.setBucketName(null);
flow.setBranch(null);
final RegisteredFlowSnapshot flowSnapshot = new RegisteredFlowSnapshot();
flowSnapshot.setFlow(flow);
final GitCreateContentRequest request = GitCreateContentRequest.builder()
.branch(branch)
.path(filePath)
.content(flowSnapshotSerializer.serialize(flowSnapshot))
.message(commitMessage)
.build();
repositoryClient.createContent(request);
// Re-populate fields before returning
flow.setBucketName(originalBucketId);
flow.setBucketIdentifier(originalBucketId);
flow.setBranch(branch);
return flow;
}
@Override
public RegisteredFlow deregisterFlow(final FlowRegistryClientConfigurationContext context, final FlowLocation flowLocation) throws FlowRegistryException, IOException {
final GitRepositoryClient repositoryClient = getRepositoryClient(context);
verifyWritePermissions(repositoryClient);
final String branch = flowLocation.getBranch();
final String filePath = getSnapshotFilePath(flowLocation);
final String commitMessage = DEREGISTER_FLOW_MESSAGE_FORMAT.formatted(flowLocation.getFlowId());
try (final InputStream deletedSnapshotContent = repositoryClient.deleteContent(filePath, commitMessage, branch)) {
final RegisteredFlowSnapshot deletedSnapshot = getSnapshot(deletedSnapshotContent);
populateFlowAndSnapshotMetadata(deletedSnapshot, flowLocation);
updateBucketReferences(repositoryClient, deletedSnapshot, flowLocation.getBucketId());
return deletedSnapshot.getFlow();
}
}
@Override
public RegisteredFlow getFlow(final FlowRegistryClientConfigurationContext context, final FlowLocation flowLocation) throws FlowRegistryException, IOException {
final GitRepositoryClient repositoryClient = getRepositoryClient(context);
verifyReadPermissions(repositoryClient);
final String branch = flowLocation.getBranch();
final String filePath = getSnapshotFilePath(flowLocation);
final RegisteredFlowSnapshot existingSnapshot = getSnapshot(repositoryClient, filePath, branch);
populateFlowAndSnapshotMetadata(existingSnapshot, flowLocation);
updateBucketReferences(repositoryClient, existingSnapshot, flowLocation.getBucketId());
final RegisteredFlow registeredFlow = existingSnapshot.getFlow();
registeredFlow.setBranch(branch);
return registeredFlow;
}
@Override
public Set<RegisteredFlow> getFlows(final FlowRegistryClientConfigurationContext context, final BucketLocation bucketLocation) throws IOException, FlowRegistryException {
final GitRepositoryClient repositoryClient = getRepositoryClient(context);
verifyReadPermissions(repositoryClient);
final String branch = bucketLocation.getBranch();
final String bucketId = bucketLocation.getBucketId();
return repositoryClient.getFileNames(bucketId, branch).stream()
.filter(filename -> filename.endsWith(SNAPSHOT_FILE_EXTENSION))
.map(filename -> mapToRegisteredFlow(bucketLocation, filename))
.collect(Collectors.toSet());
}
@Override
public RegisteredFlowSnapshot getFlowContents(final FlowRegistryClientConfigurationContext context, final FlowVersionLocation flowVersionLocation)
throws FlowRegistryException, IOException {
final GitRepositoryClient repositoryClient = getRepositoryClient(context);
verifyReadPermissions(repositoryClient);
final String version = flowVersionLocation.getVersion();
final String filePath = getSnapshotFilePath(flowVersionLocation);
final InputStream inputStream = repositoryClient.getContentFromCommit(filePath, version);
final RegisteredFlowSnapshot flowSnapshot = getSnapshot(inputStream);
populateFlowAndSnapshotMetadata(flowSnapshot, flowVersionLocation);
// populate values that aren't store in Git
flowSnapshot.getSnapshotMetadata().setVersion(version);
flowSnapshot.getSnapshotMetadata().setBranch(flowVersionLocation.getBranch());
flowSnapshot.getFlow().setBranch(flowVersionLocation.getBranch());
// populate outgoing bucket references
updateBucketReferences(repositoryClient, flowSnapshot, flowVersionLocation.getBucketId());
// determine if the version is the "latest" version by comparing to the response of getLatestVersion
final String latestVersion = getLatestVersion(context, flowVersionLocation).orElse(null);
flowSnapshot.setLatest(version.equals(latestVersion));
return flowSnapshot;
}
@Override
public RegisteredFlowSnapshot registerFlowSnapshot(final FlowRegistryClientConfigurationContext context, final RegisteredFlowSnapshot flowSnapshot, final RegisterAction action)
throws FlowRegistryException, IOException {
final GitRepositoryClient repositoryClient = getRepositoryClient(context);
verifyWritePermissions(repositoryClient);
final RegisteredFlowSnapshotMetadata snapshotMetadata = flowSnapshot.getSnapshotMetadata();
final String branch = snapshotMetadata.getBranch();
final FlowLocation flowLocation = new FlowLocation(snapshotMetadata.getBranch(), snapshotMetadata.getBucketIdentifier(), snapshotMetadata.getFlowIdentifier());
final String filePath = getSnapshotFilePath(flowLocation);
final String previousSha = repositoryClient.getContentSha(filePath, branch).orElse(null);
final String snapshotComments = snapshotMetadata.getComments();
final String commitMessage = StringUtils.isBlank(snapshotComments) ? DEFAULT_FLOW_SNAPSHOT_MESSAGE_FORMAT.formatted(flowLocation.getFlowId()) : snapshotComments;
final RegisteredFlowSnapshot existingSnapshot = getSnapshot(repositoryClient, filePath, branch);
populateFlowAndSnapshotMetadata(existingSnapshot, flowLocation);
final RegisteredFlow existingFlow = existingSnapshot.getFlow();
existingFlow.setBranch(null);
flowSnapshot.setFlow(existingFlow);
// Clear values we don't want stored in the json in Git
flowSnapshot.setBucket(null);
flowSnapshot.getSnapshotMetadata().setBucketIdentifier(null);
flowSnapshot.getSnapshotMetadata().setBranch(null);
flowSnapshot.getSnapshotMetadata().setVersion(null);
flowSnapshot.getSnapshotMetadata().setComments(null);
final ParameterContextValuesStrategy parameterContextValuesStrategy = context.getProperty(PARAMETER_CONTEXT_VALUES).asAllowableValue(ParameterContextValuesStrategy.class);
final Map<String, VersionedParameterContext> parameterContexts = flowSnapshot.getParameterContexts();
if (parameterContexts != null) {
if (ParameterContextValuesStrategy.REMOVE.equals(parameterContextValuesStrategy)) {
// remove all parameter values if configured to do so
parameterContexts.forEach((name, parameterContext) ->
parameterContext.getParameters().forEach(parameter -> parameter.setValue(null))
);
} else if (ParameterContextValuesStrategy.IGNORE_CHANGES.equals(parameterContextValuesStrategy)) {
// ignore changes on existing parameters if configured to do so
final Map<String, VersionedParameterContext> existingParameterContexts = existingSnapshot.getParameterContexts();
if (existingParameterContexts != null) {
existingParameterContexts.forEach((name, parameterContext) -> {
final VersionedParameterContext targetContext = parameterContexts.get(name);
if (targetContext != null) {
final Map<String, VersionedParameter> targetParamMap = targetContext.getParameters()
.stream()
.collect(Collectors.toMap(VersionedParameter::getName, Function.identity()));
parameterContext.getParameters().forEach(parameter -> {
final VersionedParameter targetParam = targetParamMap.get(parameter.getName());
if (targetParam != null) {
targetParam.setValue(parameter.getValue());
}
});
}
});
}
}
}
// replace the id of the top level group and all of its references with a constant value prior to serializing to avoid
// unnecessary diffs when different instances of the same flow are imported and have different top-level PG ids
final String originalFlowContentsGroupId = replaceGroupId(flowSnapshot.getFlowContents(), FLOW_CONTENTS_GROUP_ID);
final Position originalFlowContentsPosition = replacePosition(flowSnapshot.getFlowContents(), new Position(0, 0));
final GitCreateContentRequest createContentRequest = GitCreateContentRequest.builder()
.branch(branch)
.path(filePath)
.content(flowSnapshotSerializer.serialize(flowSnapshot))
.message(commitMessage)
.existingContentSha(previousSha)
.build();
final String createContentCommitSha = repositoryClient.createContent(createContentRequest);
final VersionedFlowCoordinates versionedFlowCoordinates = new VersionedFlowCoordinates();
versionedFlowCoordinates.setRegistryId(getIdentifier());
versionedFlowCoordinates.setBranch(flowLocation.getBranch());
versionedFlowCoordinates.setBucketId(flowLocation.getBucketId());
versionedFlowCoordinates.setFlowId(flowLocation.getFlowId());
versionedFlowCoordinates.setVersion(createContentCommitSha);
versionedFlowCoordinates.setStorageLocation(getStorageLocation(repositoryClient));
flowSnapshot.getFlowContents().setVersionedFlowCoordinates(versionedFlowCoordinates);
flowSnapshot.getFlow().setBranch(branch);
flowSnapshot.getSnapshotMetadata().setBranch(branch);
flowSnapshot.getSnapshotMetadata().setVersion(createContentCommitSha);
flowSnapshot.setLatest(true);
// populate outgoing bucket references
updateBucketReferences(repositoryClient, flowSnapshot, flowLocation.getBucketId());
// set back to the original id so that the returned snapshot is has the correct values from what was passed in
replaceGroupId(flowSnapshot.getFlowContents(), originalFlowContentsGroupId);
replacePosition(flowSnapshot.getFlowContents(), originalFlowContentsPosition);
return flowSnapshot;
}
@Override
public Set<RegisteredFlowSnapshotMetadata> getFlowVersions(final FlowRegistryClientConfigurationContext context, final FlowLocation flowLocation)
throws FlowRegistryException, IOException {
final GitRepositoryClient repositoryClient = getRepositoryClient(context);
verifyReadPermissions(repositoryClient);
final String branch = flowLocation.getBranch();
final String filePath = getSnapshotFilePath(flowLocation);
final Set<RegisteredFlowSnapshotMetadata> snapshotMetadataSet = new LinkedHashSet<>();
for (final GitCommit commit : repositoryClient.getCommits(filePath, branch)) {
final RegisteredFlowSnapshotMetadata snapshotMetadata = createSnapshotMetadata(commit, flowLocation);
if (snapshotMetadata.getComments() != null && snapshotMetadata.getComments().startsWith(REGISTER_FLOW_MESSAGE_PREFIX)) {
continue;
}
snapshotMetadataSet.add(snapshotMetadata);
}
return snapshotMetadataSet;
}
@Override
public Optional<String> getLatestVersion(final FlowRegistryClientConfigurationContext context, final FlowLocation flowLocation) throws FlowRegistryException, IOException {
final GitRepositoryClient repositoryClient = getRepositoryClient(context);
verifyReadPermissions(repositoryClient);
final String branch = flowLocation.getBranch();
final String filePath = getSnapshotFilePath(flowLocation);
final List<GitCommit> commits = repositoryClient.getCommits(filePath, branch);
final String latestVersion = commits.isEmpty() ? null : commits.getFirst().id();
return Optional.ofNullable(latestVersion);
}
@Override
public String generateFlowId(final String flowName) {
return flowName.trim()
.replaceAll("\\s", "-") // replace whitespace with -
.replaceAll("[^a-zA-Z0-9-]", "") // replace all other invalid chars with empty string
.replaceAll("(-)\\1+", "$1"); // replace consecutive - with single -
}
private FlowRegistryBucket createFlowRegistryBucket(final GitRepositoryClient repositoryClient, final String name) {
final FlowRegistryPermissions bucketPermissions = new FlowRegistryPermissions();
bucketPermissions.setCanRead(repositoryClient.hasReadPermission());
bucketPermissions.setCanWrite(repositoryClient.hasWritePermission());
bucketPermissions.setCanDelete(repositoryClient.hasWritePermission());
final FlowRegistryBucket bucket = new FlowRegistryBucket();
bucket.setIdentifier(name);
bucket.setName(name);
bucket.setPermissions(bucketPermissions);
return bucket;
}
private RegisteredFlowSnapshotMetadata createSnapshotMetadata(final GitCommit commit, final FlowLocation flowLocation) throws IOException {
final RegisteredFlowSnapshotMetadata snapshotMetadata = new RegisteredFlowSnapshotMetadata();
snapshotMetadata.setBranch(flowLocation.getBranch());
snapshotMetadata.setBucketIdentifier(flowLocation.getBucketId());
snapshotMetadata.setFlowIdentifier(flowLocation.getFlowId());
snapshotMetadata.setVersion(commit.id());
snapshotMetadata.setAuthor(commit.author());
snapshotMetadata.setComments(commit.message());
snapshotMetadata.setTimestamp(commit.commitDate().toEpochMilli());
return snapshotMetadata;
}
private RegisteredFlow mapToRegisteredFlow(final BucketLocation bucketLocation, final String filename) {
final String branch = bucketLocation.getBranch();
final String bucketId = bucketLocation.getBucketId();
final String flowId = filename.replace(SNAPSHOT_FILE_EXTENSION, "");
final RegisteredFlow registeredFlow = new RegisteredFlow();
registeredFlow.setIdentifier(flowId);
registeredFlow.setName(flowId);
registeredFlow.setBranch(branch);
registeredFlow.setBucketIdentifier(bucketId);
registeredFlow.setBucketName(bucketId);
return registeredFlow;
}
private String getSnapshotFilePath(final FlowLocation flowLocation) {
return SNAPSHOT_FILE_PATH_FORMAT.formatted(flowLocation.getBucketId(), flowLocation.getFlowId());
}
private RegisteredFlowSnapshot getSnapshot(final GitRepositoryClient repositoryClient, final String filePath, final String branch) throws IOException, FlowRegistryException {
try (final InputStream contentInputStream = repositoryClient.getContentFromBranch(filePath, branch)) {
return flowSnapshotSerializer.deserialize(contentInputStream);
}
}
private RegisteredFlowSnapshot getSnapshot(final InputStream inputStream) throws IOException {
try {
return flowSnapshotSerializer.deserialize(inputStream);
} finally {
if (inputStream != null) {
try {
inputStream.close();
} catch (final IOException ignored) {
// Close quietly
}
}
}
}
private Position replacePosition(final VersionedProcessGroup group, final Position newPosition) {
final Position originalPosition = group.getPosition();
group.setPosition(newPosition);
return originalPosition;
}
private String replaceGroupId(final VersionedProcessGroup group, final String newGroupId) {
final String originalGroupId = group.getIdentifier();
group.setIdentifier(newGroupId);
replaceGroupId(group.getProcessGroups(), newGroupId);
replaceGroupId(group.getRemoteProcessGroups(), newGroupId);
replaceGroupId(group.getProcessors(), newGroupId);
replaceGroupId(group.getFunnels(), newGroupId);
replaceGroupId(group.getLabels(), newGroupId);
replaceGroupId(group.getInputPorts(), newGroupId);
replaceGroupId(group.getOutputPorts(), newGroupId);
replaceGroupId(group.getControllerServices(), newGroupId);
replaceGroupId(group.getConnections(), newGroupId);
if (group.getConnections() != null) {
for (final VersionedConnection connection : group.getConnections()) {
replaceGroupId(connection.getSource(), originalGroupId, newGroupId);
replaceGroupId(connection.getDestination(), originalGroupId, newGroupId);
}
}
return originalGroupId;
}
private <T extends VersionedComponent> void replaceGroupId(final Collection<T> components, final String newGroupIdentifier) {
if (components == null) {
return;
}
components.forEach(c -> c.setGroupIdentifier(newGroupIdentifier));
}
private void replaceGroupId(final ConnectableComponent connectableComponent, final String originalGroupId, final String newGroupId) {
if (connectableComponent == null) {
return;
}
if (originalGroupId.equals(connectableComponent.getGroupId())) {
connectableComponent.setGroupId(newGroupId);
}
}
private void updateBucketReferences(final GitRepositoryClient repositoryClient, final RegisteredFlowSnapshot flowSnapshot, final String bucketId) {
final FlowRegistryBucket bucket = createFlowRegistryBucket(repositoryClient, bucketId);
flowSnapshot.setBucket(bucket);
final RegisteredFlow flow = flowSnapshot.getFlow();
flow.setBucketName(bucketId);
flow.setBucketIdentifier(bucketId);
final RegisteredFlowSnapshotMetadata snapshotMetadata = flowSnapshot.getSnapshotMetadata();
snapshotMetadata.setBucketIdentifier(bucketId);
}
// Ensures the snapshot has non-null flow and metadata fields, which would only be null if taking a flow from "Download Flow Definition" and adding directly to Git
private void populateFlowAndSnapshotMetadata(final RegisteredFlowSnapshot flowSnapshot, final FlowLocation flowLocation) {
if (flowSnapshot.getFlow() == null) {
final RegisteredFlow registeredFlow = new RegisteredFlow();
registeredFlow.setName(flowLocation.getFlowId());
registeredFlow.setIdentifier(flowLocation.getFlowId());
flowSnapshot.setFlow(registeredFlow);
}
if (flowSnapshot.getSnapshotMetadata() == null) {
final RegisteredFlowSnapshotMetadata snapshotMetadata = new RegisteredFlowSnapshotMetadata();
snapshotMetadata.setFlowIdentifier(flowLocation.getFlowId());
flowSnapshot.setSnapshotMetadata(snapshotMetadata);
}
}
private void verifyWritePermissions(final GitRepositoryClient repositoryClient) throws AuthorizationException {
if (!repositoryClient.hasWritePermission()) {
throw new AuthorizationException("Client does not have write access to the repository");
}
}
private void verifyReadPermissions(final GitRepositoryClient repositoryClient) throws AuthorizationException {
if (!repositoryClient.hasReadPermission()) {
throw new AuthorizationException("Client does not have read access to the repository");
}
}
protected synchronized GitRepositoryClient getRepositoryClient(final FlowRegistryClientConfigurationContext context) throws IOException, FlowRegistryException {
if (!clientInitialized.get()) {
getLogger().info("Initializing repository client");
repositoryClient = createRepositoryClient(context);
initializeDefaultBucket(context);
directoryExclusionPattern = Pattern.compile(context.getProperty(DIRECTORY_FILTER_EXCLUDE).getValue());
clientInitialized.set(true);
}
return repositoryClient;
}
protected void invalidateClient() {
clientInitialized.set(false);
if (repositoryClient != null) {
try {
repositoryClient.close();
} catch (final Exception e) {
getLogger().warn("Error closing repository client", e);
}
}
repositoryClient = null;
}
// If the client has write permissions to the repo, then ensure the directory for the default bucket is present and if not create it,
// otherwise the client can only be used to import flows from the repo and won't be able to set up the default bucket
private void initializeDefaultBucket(final FlowRegistryClientConfigurationContext context) throws IOException, FlowRegistryException {
if (!repositoryClient.hasWritePermission()) {
getLogger().info("Repository client [{}] does not have write permissions to the repository, skipping setup of default bucket", getIdentifier());
return;
}
final String branch = context.getProperty(REPOSITORY_BRANCH).getValue();
final Set<String> bucketDirectoryNames = repositoryClient.getTopLevelDirectoryNames(branch);
if (!bucketDirectoryNames.isEmpty()) {
getLogger().debug("Found {} existing buckets, skipping setup of default bucket", bucketDirectoryNames.size());
return;
}
final String storageLocation = getStorageLocation(repositoryClient);
getLogger().info("Creating default bucket in repo [{}] on branch [{}]", storageLocation, branch);
repositoryClient.createContent(
GitCreateContentRequest.builder()
.branch(branch)
.path(DEFAULT_BUCKET_KEEP_FILE_PATH)
.content(DEFAULT_BUCKET_KEEP_FILE_CONTENT)
.message(DEFAULT_BUCKET_KEEP_FILE_MESSAGE)
.build()
);
}
/**
* Create the property descriptors for this client.
*
* @return the list of property descriptors
*/
protected abstract List<PropertyDescriptor> createPropertyDescriptors();
/**
 * Provide the storage location for this client.
 * <p>
 * Used when logging default-bucket creation to identify the target repo.
 *
 * @param repositoryClient the repository client
 * @return the storage location value
 */
protected abstract String getStorageLocation(final GitRepositoryClient repositoryClient);
/**
 * Creates the repository client based on the current configuration context.
 * <p>
 * Called from the lazy client-initialization path; the returned instance is
 * cached and later closed by {@code invalidateClient()}.
 *
 * @param context the configuration context
 * @return the repository client
 * @throws IOException if an I/O error occurs creating the client
 * @throws FlowRegistryException if a non-I/O error occurs creating the client
 */
protected abstract GitRepositoryClient createRepositoryClient(final FlowRegistryClientConfigurationContext context) throws IOException, FlowRegistryException;
// protected to allow for overriding from tests
/**
 * Creates the serializer used for flow snapshot content.
 *
 * @return a Jackson-based flow snapshot serializer
 */
protected FlowSnapshotSerializer createFlowSnapshotSerializer() {
    return new JacksonFlowSnapshotSerializer();
}
/**
 * Strategies for handling parameter context values. The stored value is the
 * enum constant name; display name and description are shown to the user.
 */
enum ParameterContextValuesStrategy implements DescribedValue {
    RETAIN("Retain", "Retain Values in Parameter Contexts without modifications"),
    REMOVE("Remove", "Remove Values from Parameter Context"),
    IGNORE_CHANGES("Ignore Changes", "Ignore any change on existing parameters");

    private final String label;
    private final String explanation;

    ParameterContextValuesStrategy(final String label, final String explanation) {
        this.label = label;
        this.explanation = explanation;
    }

    @Override
    public String getValue() {
        // The constant name itself is the persisted value.
        return name();
    }

    @Override
    public String getDisplayName() {
        return label;
    }

    @Override
    public String getDescription() {
        return explanation;
    }
}
}
|
apache/oodt | 36,110 | filemgr/src/test/java/org/apache/oodt/cas/filemgr/structs/type/TestTypeHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oodt.cas.filemgr.structs.type;
import org.apache.oodt.cas.filemgr.catalog.Catalog;
import org.apache.oodt.cas.filemgr.catalog.DataSourceCatalog;
import org.apache.oodt.cas.filemgr.catalog.DataSourceCatalogFactory;
import org.apache.oodt.cas.filemgr.structs.Product;
import org.apache.oodt.cas.filemgr.structs.ProductPage;
import org.apache.oodt.cas.filemgr.structs.ProductType;
import org.apache.oodt.cas.filemgr.structs.Query;
import org.apache.oodt.cas.filemgr.structs.QueryCriteria;
import org.apache.oodt.cas.filemgr.structs.RangeQueryCriteria;
import org.apache.oodt.cas.filemgr.structs.Reference;
import org.apache.oodt.cas.filemgr.structs.TermQueryCriteria;
import org.apache.oodt.cas.filemgr.structs.exceptions.CatalogException;
import org.apache.oodt.cas.filemgr.structs.exceptions.ConnectionException;
import org.apache.oodt.cas.filemgr.system.FileManagerClient;
import org.apache.oodt.cas.filemgr.system.FileManagerServer;
import org.apache.oodt.cas.filemgr.util.RpcCommunicationFactory;
import org.apache.oodt.cas.filemgr.validation.ValidationLayer;
import org.apache.oodt.cas.metadata.Metadata;
import org.apache.oodt.commons.database.DatabaseConnectionBuilder;
import org.apache.oodt.commons.database.SqlScript;
import org.apache.oodt.commons.pagination.PaginationUtils;
import org.apache.xmlrpc.XmlRpcException;
import java.io.File;
import java.io.FileInputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.Vector;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.sql.DataSource;
import junit.framework.TestCase;
public class TestTypeHandler extends TestCase {
private static Logger LOG = Logger.getLogger(TestTypeHandler.class.getName());
String tmpDirPath;
DataSource publicDataSource;
FileManagerServer fmServer;
int FILEMGR_PORT = 9999;
private Properties initialProperties = new Properties(
System.getProperties());
/**
 * Configures the System properties the file manager and catalog read at
 * startup: logging config, the bundled filemgr.properties, an HSQLDB-backed
 * data source catalog rooted in a fresh temp directory, and the validation /
 * repository-manager directories. Must run before the server is started.
 * <p>
 * Fix: the FileInputStream handed to Properties.load was never closed
 * (Properties.load does not close its stream), leaking a file handle per
 * test — it is now closed in a finally block.
 */
public void setUpProperties() {
    Properties properties = new Properties(System.getProperties());
    // set the log levels
    URL loggingPropertiesUrl = this.getClass().getResource(
        "/test.logging.properties");
    properties.setProperty("java.util.logging.config.file", new File(
        loggingPropertiesUrl.getFile()).getAbsolutePath());
    // first load the example configuration
    try {
        URL filemgrPropertiesUrl = this.getClass().getResource(
            "/filemgr.properties");
        FileInputStream propertiesStream = new FileInputStream(
            filemgrPropertiesUrl.getFile());
        try {
            properties.load(propertiesStream);
        } finally {
            propertiesStream.close();
        }
    } catch (Exception e) {
        fail(e.getMessage());
    }
    // get a temp directory
    File tempDir = null;
    File tempFile;
    try {
        tempFile = File.createTempFile("foo", "bar");
        tempFile.deleteOnExit();
        tempDir = tempFile.getParentFile();
    } catch (Exception e) {
        fail(e.getMessage());
    }
    tmpDirPath = tempDir.getAbsolutePath();
    if (!tmpDirPath.endsWith("/")) {
        tmpDirPath += "/";
    }
    tmpDirPath += "testCat";
    // now override the catalog ones
    properties.setProperty(
        "org.apache.oodt.cas.filemgr.catalog.lucene.idxPath",
        tmpDirPath);
    properties.setProperty(
        "org.apache.oodt.cas.filemgr.catalog.datasource.jdbc.url",
        "jdbc:hsqldb:file:" + tmpDirPath + "/testCat;shutdown=true");
    properties.setProperty(
        "org.apache.oodt.cas.filemgr.catalog.datasource.jdbc.user",
        "sa");
    properties.setProperty(
        "org.apache.oodt.cas.filemgr.catalog.datasource.jdbc.pass",
        "");
    properties.setProperty(
        "org.apache.oodt.cas.filemgr.catalog.datasource.jdbc.driver",
        "org.hsqldb.jdbcDriver");
    // now override the val layer ones
    URL structFactoryUrl = this.getClass().getResource(
        "/xmlrpc-struct-factory");
    properties.setProperty("org.apache.oodt.cas.filemgr.validation.dirs",
        "file://" + new File(structFactoryUrl.getFile()).getAbsolutePath());
    properties.setProperty("org.apache.oodt.cas.filemgr.repositorymgr.dirs",
        "file://" + new File(structFactoryUrl.getFile()).getAbsolutePath());
    // override quote fields
    properties.setProperty(
        "org.apache.oodt.cas.filemgr.catalog.datasource.quoteFields",
        "true");
    System.setProperties(properties);
}
/**
 * Ingests a product whose DataVersion element goes through a type handler,
 * then verifies the client returns the original value ("4.0") while the raw
 * catalog row stores the handler-formatted value ("04.00").
 * <p>
 * Fixes: the JDBC Connection/Statement/ResultSet used to inspect the raw
 * row were never closed (leaked the HSQLDB connection across tests), and
 * the final assertEquals had its expected/actual arguments swapped.
 */
public void testAddAndGetMetadata() throws SQLException, MalformedURLException, ConnectionException {
    Metadata met = new Metadata();
    met.addMetadata("DataVersion", "4.0");
    met.addMetadata("ProductName", "test");
    Product testProduct = getTestProduct();
    FileManagerClient fmClient = RpcCommunicationFactory.createClient(
        new URL("http://localhost:" + FILEMGR_PORT));
    try {
        testProduct.setProductType(fmClient.getProductTypeByName("GenericFile"));
        testProduct.setProductId(fmClient.ingestProduct(testProduct, met, false));
    } catch (Exception e) {
        LOG.log(Level.SEVERE, e.getMessage());
        fail(e.getMessage());
    }
    try {
        Metadata retMet = fmClient.getMetadata(testProduct);
        assertNotNull(retMet);
        assertTrue(retMet.containsKey("DataVersion"));
        // The client must hand back the original (pre-handler) value.
        assertEquals("4.0", retMet.getMetadata("DataVersion"));
    } catch (CatalogException e) {
        fail(e.getMessage());
    }
    // Inspect the raw catalog row to confirm the type handler stored the
    // catalog-formatted value; close all JDBC resources when done.
    Connection conn = publicDataSource.getConnection();
    try {
        Statement statement = conn.createStatement();
        try {
            ResultSet rs = statement.executeQuery("SELECT metadata_value FROM GenericFile_metadata WHERE element_id = 'urn:test:DataVersion';");
            try {
                assertTrue(rs.next());
                // JUnit convention: expected value first.
                assertEquals("04.00", rs.getString("metadata_value"));
            } finally {
                rs.close();
            }
        } finally {
            statement.close();
        }
    } finally {
        conn.close();
    }
}
/**
 * Verifies that a term query expressed with the original metadata value
 * ("4.0") matches a product whose value was stored through the type
 * handler, and that the ingested product is returned.
 */
public void testQuery() throws MalformedURLException, ConnectionException, CatalogException {
    Metadata productMetadata = new Metadata();
    productMetadata.addMetadata("DataVersion", "4.0");
    productMetadata.addMetadata("ProductName", "test");
    Product ingested = getTestProduct();
    ProductType genericFile = null;
    FileManagerClient fmClient = RpcCommunicationFactory.createClient(new URL("http://localhost:" + FILEMGR_PORT));
    try {
        genericFile = fmClient.getProductTypeByName("GenericFile");
        ingested.setProductType(genericFile);
        ingested.setProductId(fmClient.ingestProduct(ingested, productMetadata, false));
    } catch (Exception e) {
        LOG.log(Level.SEVERE, e.getMessage());
        fail(e.getMessage());
    }
    // Query with the original value; the handler translates it for the catalog.
    TermQueryCriteria versionCriterion = new TermQueryCriteria();
    versionCriterion.setElementName("DataVersion");
    versionCriterion.setValue("4.0");
    Query query = new Query();
    query.addCriterion(versionCriterion);
    List<Product> products = fmClient.query(query, genericFile);
    assertEquals(products.get(0).getProductId(), ingested.getProductId());
}
/**
 * Exercises the client-side type-handler translation helpers:
 * getCatalogValues/getOrigValues must round-trip "4.0" &lt;-&gt; "04.00",
 * and getCatalogQuery must rewrite query criteria into catalog form.
 * <p>
 * Fixes: removed the unused local {@code testProduct} (nothing in this test
 * ingests or queries a product) and split the opaque inline assignment
 * {@code (met = fmClient.getCatalogValues(...))} into named steps.
 */
public void testGetCatalogAndOrigValuesAndGetCatalogQuery() throws Exception {
    Metadata met = new Metadata();
    met.addMetadata("DataVersion", "4.0");
    met.addMetadata("ProductName", "test");
    FileManagerClient fmClient = RpcCommunicationFactory.createClient(
        new URL("http://localhost:" + FILEMGR_PORT));
    ProductType genericFile = fmClient.getProductTypeByName("GenericFile");
    // Original -> catalog value, then catalog -> original value.
    Metadata catalogMet = fmClient.getCatalogValues(met, genericFile);
    assertEquals("04.00", catalogMet.getMetadata("DataVersion"));
    assertEquals("4.0", fmClient.getOrigValues(catalogMet, genericFile).getMetadata("DataVersion"));
    // A term criterion on the original value must be rewritten to catalog form.
    Query query = new Query();
    TermQueryCriteria termQuery = new TermQueryCriteria();
    termQuery.setElementName("DataVersion");
    termQuery.setValue("4.0");
    query.addCriterion(termQuery);
    query = fmClient.getCatalogQuery(query, genericFile);
    assertEquals("04.00", ((TermQueryCriteria) query.getCriteria().get(0)).getValue());
}
/**
 * Builds a flat test product of type urn:oodt:GenericFile with a single
 * reference pointing at the bundled /ingest/test.txt resource.
 *
 * @return a product ready to be ingested by the tests
 * @throws MalformedURLException if the resource path cannot form a URL
 */
private Product getTestProduct() throws MalformedURLException {
    Product product = Product.getDefaultFlatProduct("test",
        "urn:oodt:GenericFile");
    URL refUrl = this.getClass().getResource("/ingest/test.txt");
    Reference reference = new Reference();
    reference.setOrigReference(new File(refUrl.getFile()).toURI().toURL().toExternalForm());
    reference.setFileSize(123);
    List<Reference> refs = new LinkedList<Reference>();
    refs.add(reference);
    product.setProductReferences(refs);
    return product;
}
/*
 * (non-Javadoc)
 *
 * @see junit.framework.TestCase#setUp()
 */
protected void setUp() throws Exception {
    // Order matters: setUpProperties() sets the JDBC System properties that
    // createSchema() reads, and the server is started only once the schema
    // exists.
    setUpProperties();
    createSchema();
    startXmlRpcFileManager();
}
/*
 * (non-Javadoc)
 *
 * Shuts the server down, removes the temp catalog directory, and restores
 * the System properties captured at construction time.
 *
 * Fixes: guard against fmServer being null when setUp() failed before the
 * server was created (previously an NPE masked the real failure), and
 * attempt to delete the temp directory even when it is already empty.
 *
 * @see junit.framework.TestCase#tearDown()
 */
protected void tearDown() throws Exception {
    if (fmServer != null) {
        fmServer.shutdown();
    }
    // now remove the temporary directory used
    if (tmpDirPath != null) {
        File tmpDir = new File(tmpDirPath);
        File[] tmpFiles = tmpDir.listFiles();
        if (tmpFiles != null) {
            for (File tmpFile : tmpFiles) {
                tmpFile.delete();
            }
        }
        tmpDir.delete();
    }
    System.setProperties(initialProperties);
}
/**
 * Builds the shared HSQLDB DataSource from the System properties configured
 * in setUpProperties() and executes the bundled /testcat.sql schema script
 * against it.
 */
private void createSchema() {
    String jdbcUrl = System
        .getProperty("org.apache.oodt.cas.filemgr.catalog.datasource.jdbc.url");
    String jdbcUser = System
        .getProperty("org.apache.oodt.cas.filemgr.catalog.datasource.jdbc.user");
    String jdbcPass = System
        .getProperty("org.apache.oodt.cas.filemgr.catalog.datasource.jdbc.pass");
    String jdbcDriver = System
        .getProperty("org.apache.oodt.cas.filemgr.catalog.datasource.jdbc.driver");
    publicDataSource = DatabaseConnectionBuilder.buildDataSource(jdbcUser, jdbcPass,
        jdbcDriver, jdbcUrl);
    try {
        URL scriptUrl = this.getClass().getResource("/testcat.sql");
        SqlScript schemaScript = new SqlScript(
            new File(scriptUrl.getFile()).getAbsolutePath(), publicDataSource);
        schemaScript.loadScript();
        schemaScript.execute();
    } catch (Exception e) {
        LOG.log(Level.SEVERE, e.getMessage());
        fail(e.getMessage());
    }
}
// Starts the RPC file manager on FILEMGR_PORT and installs the
// HSQLDB-friendly catalog implementation under test.
private void startXmlRpcFileManager() {
    try {
        fmServer = RpcCommunicationFactory.createServer(FILEMGR_PORT);
        fmServer.startUp();
        // NOTE(review): the catalog is swapped in *after* startUp(); this
        // assumes no request is served before setCatalog() completes — confirm
        // against FileManagerServer's lifecycle.
        fmServer.setCatalog(new HsqlDbFriendlyDataSourceCatalogFatory().createCatalog());
    } catch (Exception e) {
        fail(e.getMessage());
    }
}
/**
 * Catalog factory that produces {@link HsqlDbFriendlyDataSourceCatalog}
 * instances instead of the stock DataSourceCatalog, reusing the
 * configuration fields populated by the superclass constructor.
 * NOTE(review): "Fatory" is a typo for "Factory"; left as-is because the
 * name is referenced from startXmlRpcFileManager().
 */
class HsqlDbFriendlyDataSourceCatalogFatory extends
    DataSourceCatalogFactory {
    public HsqlDbFriendlyDataSourceCatalogFatory() {
        super();
    }
    /*
     * (non-Javadoc)
     *
     * @see
     * org.apache.oodt.cas.filemgr.catalog.CatalogFactory#createCatalog()
     */
    public Catalog createCatalog() {
        // All constructor arguments come from the superclass's configuration.
        return new HsqlDbFriendlyDataSourceCatalog(dataSource,
            validationLayer, fieldIdStr, pageSize, cacheUpdateMinutes);
    }
}
class HsqlDbFriendlyDataSourceCatalog extends DataSourceCatalog {
/**
 * Forwards all configuration unchanged to the DataSourceCatalog superclass;
 * this subclass only overrides the paging/query methods below.
 *
 * @param ds JDBC data source backing the catalog
 * @param valLayer validation layer used to resolve element names to ids
 * @param fieldId forwarded to the superclass (element-id quoting flag —
 *                presumably backs fieldIdStringFlag; confirm in superclass)
 * @param pageSize number of products per page for paged queries
 * @param cacheUpdateMin cache update interval, forwarded to the superclass
 */
public HsqlDbFriendlyDataSourceCatalog(DataSource ds,
    ValidationLayer valLayer, boolean fieldId, int pageSize,
    long cacheUpdateMin) {
    super(ds, valLayer, fieldId, pageSize, cacheUpdateMin);
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.oodt.cas.filemgr.catalog.DataSourceCatalog#pagedQuery(org.apache.oodt.cas.filemgr.structs.Query,
 * org.apache.oodt.cas.filemgr.structs.ProductType, int)
 */
/**
 * Runs the query and materializes one page of Products. Returns a blank
 * page when the query matches nothing.
 * <p>
 * Fix: replaced the raw {@code List}/{@code Vector} types with
 * parameterized ones, consistent with the generics already used elsewhere
 * in this file; no behavioral change.
 */
public ProductPage pagedQuery(Query query, ProductType type, int pageNum)
    throws CatalogException {
    int totalPages = PaginationUtils.getTotalPage(getResultListSize(
        query, type), this.pageSize);
    /*
     * if there are 0 total pages in the result list size then don't
     * bother returning a valid product page instead, return blank
     * ProductPage
     */
    if (totalPages == 0) {
        return ProductPage.blankPage();
    }
    ProductPage retPage = new ProductPage();
    retPage.setPageNum(pageNum);
    retPage.setPageSize(this.pageSize);
    retPage.setTotalPages(totalPages);
    List<String> productIds = paginateQuery(query, type, pageNum);
    if (productIds != null && !productIds.isEmpty()) {
        List<Product> products = new Vector<Product>(productIds.size());
        for (String productId : productIds) {
            products.add(getProductById(productId));
        }
        retPage.setPageProducts(products);
    }
    return retPage;
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.oodt.cas.filemgr.catalog.DataSourceCatalog#getFirstPage(org.apache.oodt.cas.filemgr.structs.ProductType)
 */
public ProductPage getFirstPage(ProductType type) {
    ProductPage firstPage = null;
    try {
        // An empty Query matches everything; page numbering starts at 1.
        firstPage = pagedQuery(new Query(), type, 1);
    } catch (CatalogException e) {
        // Swallow and return null, matching the superclass's contract here.
        LOG.log(Level.WARNING,
            "Exception getting first page: Message: "
                + e.getMessage());
    }
    return firstPage;
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.oodt.cas.filemgr.catalog.DataSourceCatalog#getNextPage(org.apache.oodt.cas.filemgr.structs.ProductType,
 * org.apache.oodt.cas.filemgr.structs.ProductPage)
 */
public ProductPage getNextPage(ProductType type, ProductPage currentPage) {
    // No current page behaves like a fresh first-page request.
    if (currentPage == null) {
        return getFirstPage(type);
    }
    // Already at the end: hand the same page back.
    if (currentPage.isLastPage()) {
        return currentPage;
    }
    ProductPage nextPage = null;
    try {
        nextPage = pagedQuery(new Query(), type, currentPage.getPageNum() + 1);
    } catch (CatalogException e) {
        LOG.log(Level.WARNING, "Exception getting next page: Message: "
            + e.getMessage());
    }
    return nextPage;
}
/**
 * Builds the element-id operand for a criterion's SQL clause, quoting it
 * when the catalog is configured with string element ids.
 */
private String getSqlElementId(QueryCriteria criteria) throws Exception {
    String elementId = this.getValidationLayer()
        .getElementByName(criteria.getElementName()).getElementId();
    return fieldIdStringFlag ? "'" + elementId + "'" : elementId;
}

/**
 * Renders the first criterion as a WHERE clause against the aliased
 * metadata table. Range criteria honor the criterion's inclusive flag.
 * NOTE(review): when a range criterion has neither start nor end value the
 * clause is left unterminated — preserved exactly from the original code.
 */
private String buildFirstWhereClause(QueryCriteria criteria, String elementIdStr) {
    String clause = "(p.element_id = " + elementIdStr + " AND ";
    if (criteria instanceof TermQueryCriteria) {
        clause += " metadata_value LIKE '%"
            + ((TermQueryCriteria) criteria).getValue() + "%') ";
    } else if (criteria instanceof RangeQueryCriteria) {
        String startVal = ((RangeQueryCriteria) criteria).getStartValue();
        String endVal = ((RangeQueryCriteria) criteria).getEndValue();
        boolean inclusive = ((RangeQueryCriteria) criteria).getInclusive();
        if ((startVal != null && !startVal.equals(""))
            || (endVal != null && !endVal.equals(""))) {
            clause += " metadata_value ";
            boolean gotStart = false;
            if (startVal != null && !startVal.equals("")) {
                clause += (inclusive ? ">= '" : "> '") + startVal + "'";
                gotStart = true;
            }
            if (endVal != null && !endVal.equals("")) {
                if (gotStart) {
                    clause += (inclusive ? " AND metadata_value <= '"
                        : " AND metadata_value < '") + endVal + "'";
                } else {
                    clause += (inclusive ? "<= '" : "< '") + endVal + "'";
                }
            }
            clause += ") ";
        }
    }
    return clause;
}

/**
 * Renders a non-first criterion as a sub-select that is INNER JOINed to the
 * main metadata table, so each additional criterion further restricts the
 * matching product ids.
 * NOTE(review): unlike the first clause, range handling here has always
 * ignored the inclusive flag (plain >=/<=); preserved for compatibility.
 */
private String buildSubSelectQuery(QueryCriteria criteria, String elementIdStr,
    String subSelectQueryBase) {
    String subSelectQuery = subSelectQueryBase
        + "WHERE (element_id = " + elementIdStr + " AND ";
    if (criteria instanceof TermQueryCriteria) {
        subSelectQuery += " metadata_value LIKE '%"
            + ((TermQueryCriteria) criteria).getValue() + "%')";
    } else if (criteria instanceof RangeQueryCriteria) {
        String startVal = ((RangeQueryCriteria) criteria).getStartValue();
        String endVal = ((RangeQueryCriteria) criteria).getEndValue();
        if (startVal != null || endVal != null) {
            subSelectQuery += " metadata_value ";
            boolean gotStart = false;
            if (startVal != null && !startVal.equals("")) {
                subSelectQuery += ">= '" + startVal + "'";
                gotStart = true;
            }
            if (endVal != null && !endVal.equals("")) {
                if (gotStart) {
                    subSelectQuery += " AND metadata_value <= '" + endVal + "'";
                } else {
                    subSelectQuery += "<= '" + endVal + "'";
                }
            }
            subSelectQuery += ") ";
        }
    }
    return subSelectQuery;
}

/**
 * Builds the complete SQL statement shared by paginateQuery() and
 * getResultListSize(); the caller supplies only the SELECT prefix. This
 * replaces ~180 lines of clause-building code that was duplicated verbatim
 * between the two methods.
 */
private String buildProductQuerySql(Query query, ProductType type,
    String selectPrefix) throws Exception {
    String tableName = type.getName() + "_metadata";
    String subSelectQueryBase = "SELECT product_id FROM " + tableName + " ";
    StringBuilder fromClause = new StringBuilder("FROM " + tableName + " p ");
    StringBuilder whereClause = new StringBuilder("WHERE ");
    boolean gotFirstClause = false;
    int clauseNum = 0;
    if (this.getValidationLayer() != null
        && query.getCriteria() != null
        && query.getCriteria().size() > 0) {
        for (QueryCriteria criteria : query.getCriteria()) {
            clauseNum++;
            String elementIdStr = getSqlElementId(criteria);
            if (!gotFirstClause) {
                whereClause.append(buildFirstWhereClause(criteria, elementIdStr));
                gotFirstClause = true;
            } else {
                String subSelectTblName = "p" + clauseNum;
                fromClause.append("INNER JOIN (")
                    .append(buildSubSelectQuery(criteria, elementIdStr, subSelectQueryBase))
                    .append(") ").append(subSelectTblName).append(" ON ")
                    .append(subSelectTblName).append(".product_id = p.product_id ");
            }
        }
    }
    String sql = selectPrefix + fromClause.toString();
    if (gotFirstClause) {
        sql += whereClause.toString();
    }
    return sql;
}

/**
 * Runs the product-id query for the given criteria and returns one page of
 * product ids (pageSize entries), or all matching ids when pageNum == -1.
 * Returns null when nothing matched, mirroring the original contract.
 * <p>
 * Fixes: SQL construction extracted into buildProductQuerySql() (shared
 * with getResultListSize()); return type parameterized as List&lt;String&gt;;
 * rollback in the error path guarded against a null connection (previously
 * a failed getConnection() would raise an NPE from the catch block).
 */
private List<String> paginateQuery(Query query, ProductType type, int pageNum)
    throws CatalogException {
    Connection conn = null;
    Statement statement = null;
    ResultSet rs = null;
    List<String> productIds = null;
    boolean doSkip = true;
    int numResults = -1;
    if (pageNum == -1) {
        doSkip = false;
    } else {
        numResults = getResultListSize(query, type);
    }
    try {
        conn = dataSource.getConnection();
        // Scrollable result set so the cursor can jump to the page start.
        statement = conn.createStatement(
            ResultSet.TYPE_SCROLL_INSENSITIVE,
            ResultSet.CONCUR_READ_ONLY);
        String getProductSql = buildProductQuerySql(query, type,
            "SELECT DISTINCT p.product_id ");
        LOG.log(Level.FINE, "catalog query: executing: "
            + getProductSql);
        rs = statement.executeQuery(getProductSql);
        productIds = new Vector<String>();
        if (doSkip) {
            int startNum = (pageNum - 1) * pageSize;
            if (startNum > numResults) {
                startNum = 0;
            }
            // must call next first, or else no relative cursor
            if (rs.next()) {
                // grab the first one
                int numGrabbed;
                if (pageNum == 1) {
                    numGrabbed = 1;
                    productIds.add(rs.getString("product_id"));
                } else {
                    numGrabbed = 0;
                }
                // now move the cursor to the correct position
                if (pageNum != 1) {
                    rs.relative(startNum - 1);
                }
                // grab the rest
                while (rs.next() && numGrabbed < pageSize) {
                    productIds.add(rs.getString("product_id"));
                    numGrabbed++;
                }
            }
        } else {
            while (rs.next()) {
                productIds.add(rs.getString("product_id"));
            }
        }
        if (productIds.size() == 0) {
            productIds = null;
        }
    } catch (Exception e) {
        LOG.log(Level.SEVERE, e.getMessage());
        LOG.log(Level.WARNING, "Exception performing query. Message: "
            + e.getMessage());
        if (conn != null) {
            try {
                conn.rollback();
            } catch (SQLException e2) {
                LOG.log(Level.SEVERE,
                    "Unable to rollback query transaction. Message: "
                        + e2.getMessage());
            }
        }
        throw new CatalogException(e.getMessage());
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException ignore) {
            }
        }
        if (statement != null) {
            try {
                statement.close();
            } catch (SQLException ignore) {
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignore) {
            }
        }
    }
    return productIds;
}

/**
 * Counts the distinct products matching the query's criteria; 0 when
 * nothing matches. Uses the same SQL clauses as paginateQuery(), differing
 * only in the SELECT prefix.
 */
protected int getResultListSize(Query query, ProductType type)
    throws CatalogException {
    Connection conn = null;
    Statement statement = null;
    ResultSet rs = null;
    int resultCount = 0;
    try {
        conn = dataSource.getConnection();
        statement = conn.createStatement();
        String getProductSql = buildProductQuerySql(query, type,
            "SELECT COUNT(DISTINCT p.product_id) AS numResults ");
        LOG.log(Level.FINE, "catalog get num results: executing: "
            + getProductSql);
        rs = statement.executeQuery(getProductSql);
        while (rs.next()) {
            resultCount = rs.getInt("numResults");
        }
    } catch (Exception e) {
        LOG.log(Level.SEVERE, e.getMessage());
        LOG.log(Level.WARNING,
            "Exception performing get num results. Message: "
                + e.getMessage());
        if (conn != null) {
            try {
                conn.rollback();
            } catch (SQLException e2) {
                LOG.log(Level.SEVERE,
                    "Unable to rollback get num results transaction. Message: "
                        + e2.getMessage());
            }
        }
        throw new CatalogException(e.getMessage());
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException ignore) {
            }
        }
        if (statement != null) {
            try {
                statement.close();
            } catch (SQLException ignore) {
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignore) {
            }
        }
    }
    return resultCount;
}
}
}
|
apache/tinkerpop | 36,423 | gremlin-annotations/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/dsl/GremlinDslProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.process.traversal.dsl;
import com.squareup.javapoet.ArrayTypeName;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.JavaFile;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.ParameterSpec;
import com.squareup.javapoet.ParameterizedTypeName;
import com.squareup.javapoet.TypeName;
import com.squareup.javapoet.TypeSpec;
import com.squareup.javapoet.TypeVariableName;
import com.squareup.javapoet.WildcardTypeName;
import org.apache.tinkerpop.gremlin.process.remote.RemoteConnection;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.AddEdgeStartStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.AddVertexStartStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.GraphStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.InjectStep;
import org.apache.tinkerpop.gremlin.process.traversal.util.DefaultTraversal;
import org.apache.tinkerpop.gremlin.structure.Edge;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Filer;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.annotation.processing.RoundEnvironment;
import javax.annotation.processing.SupportedAnnotationTypes;
import javax.annotation.processing.SupportedSourceVersion;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.type.TypeVariable;
import javax.lang.model.type.WildcardType;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
import javax.tools.Diagnostic;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* A custom Java annotation processor for the {@link GremlinDsl} annotation that helps to generate DSLs classes.
*
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
@SupportedAnnotationTypes("org.apache.tinkerpop.gremlin.process.traversal.dsl.GremlinDsl")
@SupportedSourceVersion(SourceVersion.RELEASE_8)
public class GremlinDslProcessor extends AbstractProcessor {
private Messager messager;
private Elements elementUtils;
private Filer filer;
private Types typeUtils;
@Override
public synchronized void init(final ProcessingEnvironment processingEnv) {
    super.init(processingEnv);
    // Cache the annotation-processing utilities used throughout code generation.
    messager = processingEnv.getMessager();
    elementUtils = processingEnv.getElementUtils();
    filer = processingEnv.getFiler();
    typeUtils = processingEnv.getTypeUtils();
}
/**
 * Generates the DSL artifacts (Traversal interface, DefaultTraversal,
 * TraversalSource, and anonymous {@code __}) for every type annotated with
 * {@link GremlinDsl}. Always claims the annotation by returning true.
 * <p>
 * Fix: {@code ex.getMessage()} can be null (e.g. for a bare
 * NullPointerException), which previously produced a blank or failing
 * compiler error message; fall back to {@code ex.toString()}.
 */
@Override
public boolean process(final Set<? extends TypeElement> annotations, final RoundEnvironment roundEnv) {
    try {
        for (Element dslElement : roundEnv.getElementsAnnotatedWith(GremlinDsl.class)) {
            validateDSL(dslElement);
            final Context ctx = new Context((TypeElement) dslElement);
            // creates the "Traversal" interface using an extension of the GraphTraversal class that has the
            // GremlinDsl annotation on it
            generateTraversalInterface(ctx);
            // create the "DefaultTraversal" class which implements the above generated "Traversal" and can then
            // be used by the "TraversalSource" generated below to spawn new traversal instances.
            generateDefaultTraversal(ctx);
            // create the "TraversalSource" class which is used to spawn traversals from a Graph instance. It will
            // spawn instances of the "DefaultTraversal" generated above.
            generateTraversalSource(ctx);
            // create anonymous traversal for DSL
            generateAnonymousTraversal(ctx);
        }
    } catch (Exception ex) {
        final String message = ex.getMessage() == null ? ex.toString() : ex.getMessage();
        messager.printMessage(Diagnostic.Kind.ERROR, message);
    }
    return true;
}
private void generateAnonymousTraversal(final Context ctx) throws IOException {
final TypeSpec.Builder anonymousClass = TypeSpec.classBuilder("__")
.addModifiers(Modifier.PUBLIC, Modifier.FINAL);
// this class is just static methods - it should not be instantiated
anonymousClass.addMethod(MethodSpec.constructorBuilder()
.addModifiers(Modifier.PRIVATE)
.build());
// add start() method
anonymousClass.addMethod(MethodSpec.methodBuilder("start")
.addModifiers(Modifier.PUBLIC, Modifier.STATIC)
.addTypeVariable(TypeVariableName.get("A"))
.addStatement("return new $N<>()", ctx.defaultTraversalClazz)
.returns(ParameterizedTypeName.get(ctx.traversalClassName, TypeVariableName.get("A"), TypeVariableName.get("A")))
.build());
// process the methods of the GremlinDsl annotated class
for (ExecutableElement templateMethod : findMethodsOfElement(ctx.annotatedDslType, null)) {
final Optional<GremlinDsl.AnonymousMethod> methodAnnotation = Optional.ofNullable(templateMethod.getAnnotation(GremlinDsl.AnonymousMethod.class));
final String methodName = templateMethod.getSimpleName().toString();
// either use the direct return type of the DSL specification or override it with specification from
// GremlinDsl.AnonymousMethod
final TypeName returnType = methodAnnotation.isPresent() && methodAnnotation.get().returnTypeParameters().length > 0 ?
getOverridenReturnTypeDefinition(ctx.traversalClassName, methodAnnotation.get().returnTypeParameters()) :
getReturnTypeDefinition(ctx.traversalClassName, templateMethod);
final MethodSpec.Builder methodToAdd = MethodSpec.methodBuilder(methodName)
.addModifiers(Modifier.STATIC, Modifier.PUBLIC)
.addExceptions(templateMethod.getThrownTypes().stream().map(TypeName::get).collect(Collectors.toList()))
.returns(returnType);
// either use the method type parameter specified from the GremlinDsl.AnonymousMethod or just infer them
// from the DSL specification. "inferring" relies on convention and sometimes doesn't work for all cases.
final String startGeneric = methodAnnotation.isPresent() && methodAnnotation.get().methodTypeParameters().length > 0 ?
methodAnnotation.get().methodTypeParameters()[0] : "S";
if (methodAnnotation.isPresent() && methodAnnotation.get().methodTypeParameters().length > 0)
Stream.of(methodAnnotation.get().methodTypeParameters()).map(TypeVariableName::get).forEach(methodToAdd::addTypeVariable);
else {
templateMethod.getTypeParameters().forEach(tp -> methodToAdd.addTypeVariable(TypeVariableName.get(tp)));
// might have to deal with an "S" (in __ it's usually an "A") - how to make this less bound to that convention?
final List<? extends TypeMirror> returnTypeArguments = getTypeArguments(templateMethod);
returnTypeArguments.stream().filter(rtm -> rtm instanceof TypeVariable).forEach(rtm -> {
if (((TypeVariable) rtm).asElement().getSimpleName().contentEquals("S"))
methodToAdd.addTypeVariable(TypeVariableName.get(((TypeVariable) rtm).asElement().getSimpleName().toString()));
});
}
addMethodBody(methodToAdd, templateMethod, "return __.<" + startGeneric + ">start().$L(", ")", methodName);
anonymousClass.addMethod(methodToAdd.build());
}
// use methods from __ to template them into the DSL __
final Element anonymousTraversal = elementUtils.getTypeElement(__.class.getCanonicalName());
final Predicate<ExecutableElement> ignore = ee -> ee.getSimpleName().contentEquals("start");
for (ExecutableElement templateMethod : findMethodsOfElement(anonymousTraversal, ignore)) {
final String methodName = templateMethod.getSimpleName().toString();
final TypeName returnType = getReturnTypeDefinition(ctx.traversalClassName, templateMethod);
final MethodSpec.Builder methodToAdd = MethodSpec.methodBuilder(methodName)
.addModifiers(Modifier.STATIC, Modifier.PUBLIC)
.addExceptions(templateMethod.getThrownTypes().stream().map(TypeName::get).collect(Collectors.toList()))
.returns(returnType);
templateMethod.getTypeParameters().forEach(tp -> methodToAdd.addTypeVariable(TypeVariableName.get(tp)));
if (methodName.equals("__")) {
for (VariableElement param : templateMethod.getParameters()) {
methodToAdd.addParameter(ParameterSpec.get(param));
}
methodToAdd.varargs(true);
methodToAdd.addStatement("return inject(starts)", methodName);
} else {
if (templateMethod.getTypeParameters().isEmpty()) {
final List<? extends TypeMirror> types = getTypeArguments(templateMethod);
addMethodBody(methodToAdd, templateMethod, "return __.<$T>start().$L(", ")", types.get(0), methodName);
} else {
addMethodBody(methodToAdd, templateMethod, "return __.<A>start().$L(", ")", methodName);
}
}
anonymousClass.addMethod(methodToAdd.build());
}
final JavaFile traversalSourceJavaFile = JavaFile.builder(ctx.packageName, anonymousClass.build()).build();
traversalSourceJavaFile.writeTo(filer);
}
/**
 * Generates the DSL-specific {@code TraversalSource} class. The generated class extends the
 * traversal source named on the {@code @GremlinDsl} annotation (or {@code GraphTraversalSource}
 * itself) and overrides methods so they return DSL-typed sources/traversals instead of the
 * TinkerPop defaults.
 *
 * @param ctx naming and type context derived from the annotated DSL interface
 * @throws IOException if the generated java file cannot be written to the {@code filer}
 */
private void generateTraversalSource(final Context ctx) throws IOException {
    final TypeElement graphTraversalSourceElement = ctx.traversalSourceDslType;
    final TypeSpec.Builder traversalSourceClass = TypeSpec.classBuilder(ctx.traversalSourceClazz)
            .addModifiers(Modifier.PUBLIC)
            .superclass(TypeName.get(graphTraversalSourceElement.asType()));
    // add the required constructors for instantiation
    traversalSourceClass.addMethod(MethodSpec.constructorBuilder()
            .addModifiers(Modifier.PUBLIC)
            .addParameter(Graph.class, "graph")
            .addStatement("super($N)", "graph")
            .build());
    traversalSourceClass.addMethod(MethodSpec.constructorBuilder()
            .addModifiers(Modifier.PUBLIC)
            .addParameter(Graph.class, "graph")
            .addParameter(TraversalStrategies.class, "strategies")
            .addStatement("super($N, $N)", "graph", "strategies")
            .build());
    // remote-capable constructor so the DSL source works with withRemote() style usage
    traversalSourceClass.addMethod(MethodSpec.constructorBuilder()
            .addModifiers(Modifier.PUBLIC)
            .addParameter(RemoteConnection.class, "connection")
            .addStatement("super($N)", "connection")
            .build());
    // override methods to return the DSL TraversalSource. find GraphTraversalSource class somewhere in the hierarchy
    final Element tinkerPopsGraphTraversalSource = findClassAsElement(graphTraversalSourceElement, GraphTraversalSource.class);
    // predicate matches methods whose return type is NOT GraphTraversalSource, i.e. the ones to skip
    final Predicate<ExecutableElement> notGraphTraversalSourceReturnValues = e -> !(e.getReturnType().getKind() == TypeKind.DECLARED && ((DeclaredType) e.getReturnType()).asElement().getSimpleName().contentEquals(GraphTraversalSource.class.getSimpleName()));
    for (ExecutableElement elementOfGraphTraversalSource : findMethodsOfElement(tinkerPopsGraphTraversalSource, notGraphTraversalSourceReturnValues)) {
        // first copy/override methods that return a GraphTraversalSource so that we can instead return
        // the DSL TraversalSource class.
        traversalSourceClass.addMethod(constructMethod(elementOfGraphTraversalSource, ctx.traversalSourceClassName, "",Modifier.PUBLIC));
    }
    // override methods that return GraphTraversal that come from the user defined extension of GraphTraversal
    if (!graphTraversalSourceElement.getSimpleName().contentEquals(GraphTraversalSource.class.getSimpleName())) {
        final Predicate<ExecutableElement> notGraphTraversalReturnValues = e -> !(e.getReturnType().getKind() == TypeKind.DECLARED && ((DeclaredType) e.getReturnType()).asElement().getSimpleName().contentEquals(GraphTraversal.class.getSimpleName()));
        for (ExecutableElement templateMethod : findMethodsOfElement(graphTraversalSourceElement, notGraphTraversalReturnValues)) {
            final MethodSpec.Builder methodToAdd = MethodSpec.methodBuilder(templateMethod.getSimpleName().toString())
                    .addModifiers(Modifier.PUBLIC)
                    .addAnnotation(Override.class);
            // each override clones the source (standard TinkerPop source semantics) and wraps
            // the super call's traversal in the DSL's default traversal implementation
            methodToAdd.addStatement("$T clone = this.clone()", ctx.traversalSourceClassName);
            addMethodBody(methodToAdd, templateMethod, "return new $T (clone, super.$L(", ").asAdmin())",
                    ctx.defaultTraversalClassName, templateMethod.getSimpleName());
            methodToAdd.returns(getReturnTypeDefinition(ctx.traversalClassName, templateMethod));
            traversalSourceClass.addMethod(methodToAdd.build());
        }
    }
    if (ctx.generateDefaultMethods) {
        // override methods that return GraphTraversal
        // addV() - no label
        traversalSourceClass.addMethod(MethodSpec.methodBuilder("addV")
                .addModifiers(Modifier.PUBLIC)
                .addAnnotation(Override.class)
                .addStatement("$N clone = this.clone()", ctx.traversalSourceClazz)
                .addStatement("clone.getGremlinLang().addStep($T.addV)", GraphTraversal.Symbols.class)
                .addStatement("$N traversal = new $N(clone)", ctx.defaultTraversalClazz, ctx.defaultTraversalClazz)
                .addStatement("return ($T) traversal.asAdmin().addStep(new $T(traversal, (String) null))", ctx.traversalClassName, AddVertexStartStep.class)
                .returns(ParameterizedTypeName.get(ctx.traversalClassName, ClassName.get(Vertex.class), ClassName.get(Vertex.class)))
                .build());
        // addV(String label)
        traversalSourceClass.addMethod(MethodSpec.methodBuilder("addV")
                .addModifiers(Modifier.PUBLIC)
                .addAnnotation(Override.class)
                .addParameter(String.class, "label")
                .addStatement("$N clone = this.clone()", ctx.traversalSourceClazz)
                .addStatement("clone.getGremlinLang().addStep($T.addV, label)", GraphTraversal.Symbols.class)
                .addStatement("$N traversal = new $N(clone)", ctx.defaultTraversalClazz, ctx.defaultTraversalClazz)
                .addStatement("return ($T) traversal.asAdmin().addStep(new $T(traversal, label))", ctx.traversalClassName, AddVertexStartStep.class)
                .returns(ParameterizedTypeName.get(ctx.traversalClassName, ClassName.get(Vertex.class), ClassName.get(Vertex.class)))
                .build());
        // addV(Traversal vertexLabelTraversal)
        traversalSourceClass.addMethod(MethodSpec.methodBuilder("addV")
                .addModifiers(Modifier.PUBLIC)
                .addAnnotation(Override.class)
                .addParameter(Traversal.class, "vertexLabelTraversal")
                .addStatement("$N clone = this.clone()", ctx.traversalSourceClazz)
                .addStatement("clone.getGremlinLang().addStep($T.addV, vertexLabelTraversal)", GraphTraversal.Symbols.class)
                .addStatement("$N traversal = new $N(clone)", ctx.defaultTraversalClazz, ctx.defaultTraversalClazz)
                .addStatement("return ($T) traversal.asAdmin().addStep(new $T(traversal, vertexLabelTraversal))", ctx.traversalClassName, AddVertexStartStep.class)
                .returns(ParameterizedTypeName.get(ctx.traversalClassName, ClassName.get(Vertex.class), ClassName.get(Vertex.class)))
                .build());
        // addE(String label)
        traversalSourceClass.addMethod(MethodSpec.methodBuilder("addE")
                .addModifiers(Modifier.PUBLIC)
                .addAnnotation(Override.class)
                .addParameter(String.class, "label")
                .addStatement("$N clone = this.clone()", ctx.traversalSourceClazz)
                .addStatement("clone.getGremlinLang().addStep($T.addE, label)", GraphTraversal.Symbols.class)
                .addStatement("$N traversal = new $N(clone)", ctx.defaultTraversalClazz, ctx.defaultTraversalClazz)
                .addStatement("return ($T) traversal.asAdmin().addStep(new $T(traversal, label))", ctx.traversalClassName, AddEdgeStartStep.class)
                .returns(ParameterizedTypeName.get(ctx.traversalClassName, ClassName.get(Edge.class), ClassName.get(Edge.class)))
                .build());
        // addE(Traversal edgeLabelTraversal)
        traversalSourceClass.addMethod(MethodSpec.methodBuilder("addE")
                .addModifiers(Modifier.PUBLIC)
                .addAnnotation(Override.class)
                .addParameter(Traversal.class, "edgeLabelTraversal")
                .addStatement("$N clone = this.clone()", ctx.traversalSourceClazz)
                .addStatement("clone.getGremlinLang().addStep($T.addE, edgeLabelTraversal)", GraphTraversal.Symbols.class)
                .addStatement("$N traversal = new $N(clone)", ctx.defaultTraversalClazz, ctx.defaultTraversalClazz)
                .addStatement("return ($T) traversal.asAdmin().addStep(new $T(traversal, edgeLabelTraversal))", ctx.traversalClassName, AddEdgeStartStep.class)
                .returns(ParameterizedTypeName.get(ctx.traversalClassName, ClassName.get(Edge.class), ClassName.get(Edge.class)))
                .build());
        // V(Object... vertexIds)
        traversalSourceClass.addMethod(MethodSpec.methodBuilder("V")
                .addModifiers(Modifier.PUBLIC)
                .addAnnotation(Override.class)
                .addParameter(Object[].class, "vertexIds")
                .varargs(true)
                .addStatement("$N clone = this.clone()", ctx.traversalSourceClazz)
                .addStatement("clone.getGremlinLang().addStep($T.V, vertexIds)", GraphTraversal.Symbols.class)
                .addStatement("$N traversal = new $N(clone)", ctx.defaultTraversalClazz, ctx.defaultTraversalClazz)
                .addStatement("return ($T) traversal.asAdmin().addStep(new $T(traversal, $T.class, true, vertexIds))", ctx.traversalClassName, GraphStep.class, Vertex.class)
                .returns(ParameterizedTypeName.get(ctx.traversalClassName, ClassName.get(Vertex.class), ClassName.get(Vertex.class)))
                .build());
        // E(Object... edgeIds)
        traversalSourceClass.addMethod(MethodSpec.methodBuilder("E")
                .addModifiers(Modifier.PUBLIC)
                .addAnnotation(Override.class)
                .addParameter(Object[].class, "edgeIds")
                .varargs(true)
                .addStatement("$N clone = this.clone()", ctx.traversalSourceClazz)
                .addStatement("clone.getGremlinLang().addStep($T.E, edgeIds)", GraphTraversal.Symbols.class)
                .addStatement("$N traversal = new $N(clone)", ctx.defaultTraversalClazz, ctx.defaultTraversalClazz)
                .addStatement("return ($T) traversal.asAdmin().addStep(new $T(traversal, $T.class, true, edgeIds))", ctx.traversalClassName, GraphStep.class, Edge.class)
                .returns(ParameterizedTypeName.get(ctx.traversalClassName, ClassName.get(Edge.class), ClassName.get(Edge.class)))
                .build());
        // inject(S... starts) - generic spawn
        traversalSourceClass.addMethod(MethodSpec.methodBuilder("inject")
                .addModifiers(Modifier.PUBLIC)
                .addAnnotation(Override.class)
                .addParameter(ArrayTypeName.of(TypeVariableName.get("S")), "starts")
                .varargs(true)
                .addTypeVariable(TypeVariableName.get("S"))
                .addStatement("$N clone = this.clone()", ctx.traversalSourceClazz)
                .addStatement("clone.getGremlinLang().addStep($T.inject, starts)", GraphTraversal.Symbols.class)
                .addStatement("$N traversal = new $N(clone)", ctx.defaultTraversalClazz, ctx.defaultTraversalClazz)
                .addStatement("return ($T) traversal.asAdmin().addStep(new $T(traversal, starts))", ctx.traversalClassName, InjectStep.class)
                .returns(ParameterizedTypeName.get(ctx.traversalClassName, TypeVariableName.get("S"), TypeVariableName.get("S")))
                .build());
        // points the source at the generated anonymous __ class for bytecode/lambda resolution
        traversalSourceClass.addMethod(MethodSpec.methodBuilder("getAnonymousTraversalClass")
                .addModifiers(Modifier.PUBLIC)
                .addAnnotation(Override.class)
                .addStatement("return Optional.of(__.class)")
                .returns(ParameterizedTypeName.get(ClassName.get(Optional.class),
                        ParameterizedTypeName.get(ClassName.get(Class.class), WildcardTypeName.subtypeOf(Object.class))))
                .build());
    }
    final JavaFile traversalSourceJavaFile = JavaFile.builder(ctx.packageName, traversalSourceClass.build()).build();
    traversalSourceJavaFile.writeTo(filer);
}
/**
 * Walks up the superclass chain until it finds the element whose simple name matches the
 * given class. Only the first direct supertype is followed at each level.
 */
private Element findClassAsElement(final Element element, final Class<?> clazz) {
    Element current = element;
    while (!current.getSimpleName().contentEquals(clazz.getSimpleName())) {
        final List<? extends TypeMirror> parents = typeUtils.directSupertypes(current.asType());
        current = typeUtils.asElement(parents.get(0));
    }
    return current;
}
/**
 * Generates the {@code DefaultXXXTraversal} class: the concrete implementation of the DSL
 * traversal interface, extending TinkerPop's {@code DefaultTraversal}.
 *
 * @param ctx naming and type context derived from the annotated DSL interface
 * @throws IOException if the generated java file cannot be written out
 */
private void generateDefaultTraversal(final Context ctx) throws IOException {
    final TypeVariableName s = TypeVariableName.get("S");
    final TypeVariableName e = TypeVariableName.get("E");
    final TypeSpec.Builder builder = TypeSpec.classBuilder(ctx.defaultTraversalClazz)
            .addModifiers(Modifier.PUBLIC)
            .addTypeVariables(Arrays.asList(s, e))
            .superclass(TypeName.get(elementUtils.getTypeElement(DefaultTraversal.class.getCanonicalName()).asType()))
            .addSuperinterface(ParameterizedTypeName.get(ctx.traversalClassName, s, e));
    // constructors required so the traversal can be instantiated by the source and the
    // traversal machinery
    builder.addMethod(MethodSpec.constructorBuilder()
            .addModifiers(Modifier.PUBLIC)
            .addStatement("super()")
            .build());
    builder.addMethod(MethodSpec.constructorBuilder()
            .addModifiers(Modifier.PUBLIC)
            .addParameter(Graph.class, "graph")
            .addStatement("super($N)", "graph")
            .build());
    builder.addMethod(MethodSpec.constructorBuilder()
            .addModifiers(Modifier.PUBLIC)
            .addParameter(ctx.traversalSourceClassName, "traversalSource")
            .addStatement("super($N)", "traversalSource")
            .build());
    builder.addMethod(MethodSpec.constructorBuilder()
            .addModifiers(Modifier.PUBLIC)
            .addParameter(ctx.traversalSourceClassName, "traversalSource")
            .addParameter(ctx.graphTraversalAdminClassName, "traversal")
            .addStatement("super($N, $N.asAdmin())", "traversalSource", "traversal")
            .build());
    // narrow the return types of iterate()/asAdmin()/clone() to the DSL-specific types
    builder.addMethod(MethodSpec.methodBuilder("iterate")
            .addModifiers(Modifier.PUBLIC)
            .addAnnotation(Override.class)
            .addStatement("return ($T) super.iterate()", ctx.traversalClassName)
            .returns(ParameterizedTypeName.get(ctx.traversalClassName, s, e))
            .build());
    builder.addMethod(MethodSpec.methodBuilder("asAdmin")
            .addModifiers(Modifier.PUBLIC)
            .addAnnotation(Override.class)
            .addStatement("return ($T) super.asAdmin()", GraphTraversal.Admin.class)
            .returns(ParameterizedTypeName.get(ctx.graphTraversalAdminClassName, s, e))
            .build());
    builder.addMethod(MethodSpec.methodBuilder("clone")
            .addModifiers(Modifier.PUBLIC)
            .addAnnotation(Override.class)
            .addStatement("return ($T) super.clone()", ctx.defaultTraversalClassName)
            .returns(ParameterizedTypeName.get(ctx.defaultTraversalClassName, s, e))
            .build());
    final JavaFile generated = JavaFile.builder(ctx.packageName, builder.build()).build();
    generated.writeTo(filer);
}
/**
 * Generates the DSL traversal interface which extends the {@code @GremlinDsl} annotated
 * interface and re-declares both its methods and {@code GraphTraversal}'s methods with
 * DSL-typed return values.
 *
 * @param ctx naming and type context derived from the annotated DSL interface
 * @throws IOException if the generated java file cannot be written out
 */
private void generateTraversalInterface(final Context ctx) throws IOException {
    final TypeSpec.Builder dslInterface = TypeSpec.interfaceBuilder(ctx.traversalClazz)
            .addModifiers(Modifier.PUBLIC)
            .addTypeVariables(Arrays.asList(TypeVariableName.get("S"), TypeVariableName.get("E")))
            .addSuperinterface(TypeName.get(ctx.annotatedDslType.asType()));
    // methods declared directly on the GremlinDsl annotated interface
    for (final ExecutableElement method : findMethodsOfElement(ctx.annotatedDslType, null)) {
        dslInterface.addMethod(constructMethod(method, ctx.traversalClassName, ctx.dslName,
                Modifier.PUBLIC, Modifier.DEFAULT));
    }
    // methods inherited from GraphTraversal - asAdmin()/iterate() are excluded here and
    // handled specially below
    final TypeElement graphTraversal = elementUtils.getTypeElement(GraphTraversal.class.getCanonicalName());
    final Predicate<ExecutableElement> skipped = e ->
            e.getSimpleName().contentEquals("asAdmin") || e.getSimpleName().contentEquals("iterate");
    for (final ExecutableElement method : findMethodsOfElement(graphTraversal, skipped)) {
        dslInterface.addMethod(constructMethod(method, ctx.traversalClassName, ctx.dslName,
                Modifier.PUBLIC, Modifier.DEFAULT));
    }
    // there are weird things with generics that require this method to be implemented if it
    // isn't already present in the GremlinDsl annotated class extending from GraphTraversal
    dslInterface.addMethod(MethodSpec.methodBuilder("iterate")
            .addModifiers(Modifier.PUBLIC, Modifier.DEFAULT)
            .addAnnotation(Override.class)
            .addStatement("$T.super.iterate()", ClassName.get(ctx.annotatedDslType))
            .addStatement("return this")
            .returns(ParameterizedTypeName.get(ctx.traversalClassName, TypeVariableName.get("S"), TypeVariableName.get("E")))
            .build());
    final JavaFile generated = JavaFile.builder(ctx.packageName, dslInterface.build()).build();
    generated.writeTo(filer);
}
/**
 * Builds an overriding {@code MethodSpec} from a template method. The generated body
 * delegates to {@code super} (optionally qualified by {@code parent} for interface
 * default-method dispatch) and casts the result to {@code returnClazz}.
 *
 * @param element the template method to copy (must be an {@code ExecutableElement})
 * @param returnClazz the DSL type the override should return
 * @param parent interface name to qualify the super call with, or empty for a plain super call
 * @param modifiers modifiers to place on the generated method
 */
private MethodSpec constructMethod(final Element element, final ClassName returnClazz, final String parent,
                                   final Modifier... modifiers) {
    final ExecutableElement source = (ExecutableElement) element;
    final String name = source.getSimpleName().toString();
    final MethodSpec.Builder override = MethodSpec.methodBuilder(name)
            .addModifiers(modifiers)
            .addAnnotation(Override.class)
            .addExceptions(source.getThrownTypes().stream().map(TypeName::get).collect(Collectors.toList()))
            .returns(getReturnTypeDefinition(returnClazz, source));
    source.getTypeParameters().forEach(tp -> override.addTypeVariable(TypeVariableName.get(tp)));
    String qualifier = "";
    if (!parent.isEmpty()) {
        qualifier = parent + ".";
    }
    addMethodBody(override, source, "return ($T) " + qualifier + "super.$L(", ")", returnClazz, name);
    return override.build();
}
/**
 * Copies the template method's parameters onto the generated method and splices their
 * names, comma separated, between {@code startBody} and {@code endBody} to form the
 * generated statement.
 */
private void addMethodBody(final MethodSpec.Builder methodToAdd, final ExecutableElement templateMethod,
                           final String startBody, final String endBody, final Object... statementArgs) {
    final List<? extends VariableElement> parameters = templateMethod.getParameters();
    final StringBuilder statement = new StringBuilder(startBody);
    String separator = "";
    for (final VariableElement parameter : parameters) {
        methodToAdd.addParameter(ParameterSpec.get(parameter));
        statement.append(separator).append(parameter.getSimpleName());
        separator = ",";
    }
    statement.append(endBody);
    // a trailing array parameter on the template is exposed as varargs
    final int count = parameters.size();
    if (count > 0 && parameters.get(count - 1).asType().getKind() == TypeKind.ARRAY) {
        methodToAdd.varargs(true);
    }
    methodToAdd.addStatement(statement.toString(), statementArgs);
}
/**
 * Builds a parameterized return type from the raw type strings supplied on
 * {@code GremlinDsl.AnonymousMethod#returnTypeParameters()}, used to override the inferred
 * return type of a generated anonymous method.
 *
 * <p>Each value is first treated as a fully-qualified class name; failing that, an
 * {@code "X extends Y"} form is parsed into a bounded type variable, and anything else
 * becomes a plain type variable.
 *
 * @param returnClazz the generated traversal class to parameterize
 * @param typeValues raw type parameter strings taken from the annotation
 */
private TypeName getOverridenReturnTypeDefinition(final ClassName returnClazz, final String[] typeValues) {
    return ParameterizedTypeName.get(returnClazz, Stream.of(typeValues).map(tv -> {
        try {
            return ClassName.get(Class.forName(tv));
        } catch (ClassNotFoundException cnfe) {
            if (tv.contains("extends")) {
                final String[] sides = tv.split(" extends ");
                final TypeVariableName name = TypeVariableName.get(sides[0]);
                // TypeVariableName is immutable: withBounds() returns a NEW instance, so its
                // result must be returned - the previous code discarded it, silently dropping
                // the declared bound from the generated signature.
                try {
                    return name.withBounds(ClassName.get(Class.forName(sides[1])));
                } catch (Exception ex) {
                    // bound is not a resolvable class - treat it as a type variable bound
                    return name.withBounds(TypeVariableName.get(sides[1]));
                }
            } else {
                return TypeVariableName.get(tv);
            }
        }
    }).collect(Collectors.toList()).toArray(new TypeName[typeValues.length]));
}
/**
 * Derives the generated method's return type: the DSL traversal class, parameterized with
 * the template method's generic return-type arguments when it has any.
 */
private TypeName getReturnTypeDefinition(final ClassName returnClazz, final ExecutableElement templateMethod) {
    final List<? extends TypeMirror> typeArgs = getTypeArguments(templateMethod);
    if (typeArgs.isEmpty()) {
        return returnClazz;
    }
    final TypeName[] generics = typeArgs.stream().map(TypeName::get).toArray(TypeName[]::new);
    return ParameterizedTypeName.get(returnClazz, generics);
}
/**
 * Validates that the {@code @GremlinDsl} annotation sits on a public interface.
 *
 * @throws ProcessorException if the annotated element is not an interface or not public
 */
private void validateDSL(final Element dslElement) throws ProcessorException {
    final boolean isInterface = dslElement.getKind() == ElementKind.INTERFACE;
    if (!isInterface) {
        throw new ProcessorException(dslElement, "Only interfaces can be annotated with @%s", GremlinDsl.class.getSimpleName());
    }
    final TypeElement annotated = (TypeElement) dslElement;
    if (!annotated.getModifiers().contains(Modifier.PUBLIC)) {
        throw new ProcessorException(dslElement, "The interface %s is not public.", annotated.getQualifiedName());
    }
}
/**
 * Returns the methods enclosed by the given element, excluding those matched by the
 * {@code ignore} predicate. A {@code null} predicate means no methods are filtered out.
 */
private List<ExecutableElement> findMethodsOfElement(final Element element, final Predicate<ExecutableElement> ignore) {
    final Predicate<ExecutableElement> keep = null == ignore ? ee -> true : ignore.negate();
    return element.getEnclosedElements().stream()
            .filter(enclosed -> enclosed.getKind() == ElementKind.METHOD)
            .map(enclosed -> (ExecutableElement) enclosed)
            .filter(keep)
            .collect(Collectors.toList());
}
/**
 * Extracts the generic type arguments of the template method's return type. The return
 * type is cast to {@code DeclaredType}, so callers must only pass methods returning a
 * class or interface type.
 */
private List<? extends TypeMirror> getTypeArguments(final ExecutableElement templateMethod) {
    return ((DeclaredType) templateMethod.getReturnType()).getTypeArguments();
}
/**
 * Holds the naming and type context derived from a single {@code @GremlinDsl} annotated
 * interface: the generated class/interface names and the JavaPoet {@code ClassName}
 * handles used throughout the generation methods.
 */
private class Context {
    // the interface that carries the @GremlinDsl annotation
    private final TypeElement annotatedDslType;
    // package the generated sources are written to
    private final String packageName;
    // simple name of the annotated DSL interface
    private final String dslName;
    // generated traversal interface: simple name and fully-qualified ClassName
    private final String traversalClazz;
    private final ClassName traversalClassName;
    // generated traversal source: simple name and fully-qualified ClassName
    private final String traversalSourceClazz;
    private final ClassName traversalSourceClassName;
    // generated default (concrete) traversal: simple name and fully-qualified ClassName
    private final String defaultTraversalClazz;
    private final ClassName defaultTraversalClassName;
    // TinkerPop's GraphTraversal.Admin, used when generating asAdmin() overrides
    private final ClassName graphTraversalAdminClassName;
    // traversal source named on the annotation (may be GraphTraversalSource itself)
    private final TypeElement traversalSourceDslType;
    // whether to generate default implementations of addV/addE/V/E/inject etc.
    private final boolean generateDefaultMethods;
    /**
     * Derives all generated names from the annotated element and its annotation values.
     */
    public Context(final TypeElement dslElement) {
        annotatedDslType = dslElement;
        // gets the annotation on the dsl class/interface
        GremlinDsl gremlinDslAnnotation = dslElement.getAnnotation(GremlinDsl.class);
        generateDefaultMethods = gremlinDslAnnotation.generateDefaultMethods();
        traversalSourceDslType = elementUtils.getTypeElement(gremlinDslAnnotation.traversalSource());
        packageName = getPackageName(dslElement, gremlinDslAnnotation);
        // create the Traversal implementation interface
        dslName = dslElement.getSimpleName().toString();
        // NOTE(review): assumes the annotated interface name ends with "TraversalDsl"/"TraversalDSL";
        // a shorter or differently-suffixed name would be truncated incorrectly - validateDSL does
        // not appear to check this. TODO confirm with the annotation's documented naming contract.
        final String dslPrefix = dslName.substring(0, dslName.length() - "TraversalDSL".length()); // chop off "TraversalDSL"
        traversalClazz = dslPrefix + "Traversal";
        traversalClassName = ClassName.get(packageName, traversalClazz);
        traversalSourceClazz = dslPrefix + "TraversalSource";
        traversalSourceClassName = ClassName.get(packageName, traversalSourceClazz);
        defaultTraversalClazz = "Default" + traversalClazz;
        defaultTraversalClassName = ClassName.get(packageName, defaultTraversalClazz);
        graphTraversalAdminClassName = ClassName.get(GraphTraversal.Admin.class);
    }
    /**
     * Resolves the target package: an explicit {@code packageName} on the annotation wins,
     * otherwise the annotated element's own package is used.
     */
    private String getPackageName(final Element dslElement, final GremlinDsl gremlinDslAnnotation) {
        return gremlinDslAnnotation.packageName().isEmpty() ?
                elementUtils.getPackageOf(dslElement).getQualifiedName().toString() :
                gremlinDslAnnotation.packageName();
    }
}
}
|
googleapis/google-cloud-java | 36,469 | java-managedkafka/google-cloud-managedkafka/src/main/java/com/google/cloud/managedkafka/v1/stub/GrpcManagedKafkaConnectStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.managedkafka.v1.stub;
import static com.google.cloud.managedkafka.v1.ManagedKafkaConnectClient.ListConnectClustersPagedResponse;
import static com.google.cloud.managedkafka.v1.ManagedKafkaConnectClient.ListConnectorsPagedResponse;
import static com.google.cloud.managedkafka.v1.ManagedKafkaConnectClient.ListLocationsPagedResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.cloud.managedkafka.v1.ConnectCluster;
import com.google.cloud.managedkafka.v1.Connector;
import com.google.cloud.managedkafka.v1.CreateConnectClusterRequest;
import com.google.cloud.managedkafka.v1.CreateConnectorRequest;
import com.google.cloud.managedkafka.v1.DeleteConnectClusterRequest;
import com.google.cloud.managedkafka.v1.DeleteConnectorRequest;
import com.google.cloud.managedkafka.v1.GetConnectClusterRequest;
import com.google.cloud.managedkafka.v1.GetConnectorRequest;
import com.google.cloud.managedkafka.v1.ListConnectClustersRequest;
import com.google.cloud.managedkafka.v1.ListConnectClustersResponse;
import com.google.cloud.managedkafka.v1.ListConnectorsRequest;
import com.google.cloud.managedkafka.v1.ListConnectorsResponse;
import com.google.cloud.managedkafka.v1.OperationMetadata;
import com.google.cloud.managedkafka.v1.PauseConnectorRequest;
import com.google.cloud.managedkafka.v1.PauseConnectorResponse;
import com.google.cloud.managedkafka.v1.RestartConnectorRequest;
import com.google.cloud.managedkafka.v1.RestartConnectorResponse;
import com.google.cloud.managedkafka.v1.ResumeConnectorRequest;
import com.google.cloud.managedkafka.v1.ResumeConnectorResponse;
import com.google.cloud.managedkafka.v1.StopConnectorRequest;
import com.google.cloud.managedkafka.v1.StopConnectorResponse;
import com.google.cloud.managedkafka.v1.UpdateConnectClusterRequest;
import com.google.cloud.managedkafka.v1.UpdateConnectorRequest;
import com.google.longrunning.Operation;
import com.google.longrunning.stub.GrpcOperationsStub;
import com.google.protobuf.Empty;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* gRPC stub implementation for the ManagedKafkaConnect service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class GrpcManagedKafkaConnectStub extends ManagedKafkaConnectStub {
private static final MethodDescriptor<ListConnectClustersRequest, ListConnectClustersResponse>
listConnectClustersMethodDescriptor =
MethodDescriptor.<ListConnectClustersRequest, ListConnectClustersResponse>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.managedkafka.v1.ManagedKafkaConnect/ListConnectClusters")
.setRequestMarshaller(
ProtoUtils.marshaller(ListConnectClustersRequest.getDefaultInstance()))
.setResponseMarshaller(
ProtoUtils.marshaller(ListConnectClustersResponse.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<GetConnectClusterRequest, ConnectCluster>
getConnectClusterMethodDescriptor =
MethodDescriptor.<GetConnectClusterRequest, ConnectCluster>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.managedkafka.v1.ManagedKafkaConnect/GetConnectCluster")
.setRequestMarshaller(
ProtoUtils.marshaller(GetConnectClusterRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(ConnectCluster.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<CreateConnectClusterRequest, Operation>
createConnectClusterMethodDescriptor =
MethodDescriptor.<CreateConnectClusterRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.managedkafka.v1.ManagedKafkaConnect/CreateConnectCluster")
.setRequestMarshaller(
ProtoUtils.marshaller(CreateConnectClusterRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<UpdateConnectClusterRequest, Operation>
updateConnectClusterMethodDescriptor =
MethodDescriptor.<UpdateConnectClusterRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.managedkafka.v1.ManagedKafkaConnect/UpdateConnectCluster")
.setRequestMarshaller(
ProtoUtils.marshaller(UpdateConnectClusterRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<DeleteConnectClusterRequest, Operation>
deleteConnectClusterMethodDescriptor =
MethodDescriptor.<DeleteConnectClusterRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.managedkafka.v1.ManagedKafkaConnect/DeleteConnectCluster")
.setRequestMarshaller(
ProtoUtils.marshaller(DeleteConnectClusterRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<ListConnectorsRequest, ListConnectorsResponse>
listConnectorsMethodDescriptor =
MethodDescriptor.<ListConnectorsRequest, ListConnectorsResponse>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.managedkafka.v1.ManagedKafkaConnect/ListConnectors")
.setRequestMarshaller(
ProtoUtils.marshaller(ListConnectorsRequest.getDefaultInstance()))
.setResponseMarshaller(
ProtoUtils.marshaller(ListConnectorsResponse.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<GetConnectorRequest, Connector>
getConnectorMethodDescriptor =
MethodDescriptor.<GetConnectorRequest, Connector>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.managedkafka.v1.ManagedKafkaConnect/GetConnector")
.setRequestMarshaller(ProtoUtils.marshaller(GetConnectorRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Connector.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<CreateConnectorRequest, Connector>
createConnectorMethodDescriptor =
MethodDescriptor.<CreateConnectorRequest, Connector>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.managedkafka.v1.ManagedKafkaConnect/CreateConnector")
.setRequestMarshaller(
ProtoUtils.marshaller(CreateConnectorRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Connector.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
  // gRPC method descriptor for ManagedKafkaConnect.UpdateConnector (unary).
  private static final MethodDescriptor<UpdateConnectorRequest, Connector>
      updateConnectorMethodDescriptor =
          MethodDescriptor.<UpdateConnectorRequest, Connector>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.managedkafka.v1.ManagedKafkaConnect/UpdateConnector")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UpdateConnectorRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Connector.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // gRPC method descriptor for ManagedKafkaConnect.DeleteConnector (unary, Empty response).
  private static final MethodDescriptor<DeleteConnectorRequest, Empty>
      deleteConnectorMethodDescriptor =
          MethodDescriptor.<DeleteConnectorRequest, Empty>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.managedkafka.v1.ManagedKafkaConnect/DeleteConnector")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeleteConnectorRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // gRPC method descriptor for ManagedKafkaConnect.PauseConnector (unary).
  private static final MethodDescriptor<PauseConnectorRequest, PauseConnectorResponse>
      pauseConnectorMethodDescriptor =
          MethodDescriptor.<PauseConnectorRequest, PauseConnectorResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.managedkafka.v1.ManagedKafkaConnect/PauseConnector")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(PauseConnectorRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(PauseConnectorResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // gRPC method descriptor for ManagedKafkaConnect.ResumeConnector (unary).
  private static final MethodDescriptor<ResumeConnectorRequest, ResumeConnectorResponse>
      resumeConnectorMethodDescriptor =
          MethodDescriptor.<ResumeConnectorRequest, ResumeConnectorResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.managedkafka.v1.ManagedKafkaConnect/ResumeConnector")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ResumeConnectorRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ResumeConnectorResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // gRPC method descriptor for ManagedKafkaConnect.RestartConnector (unary).
  private static final MethodDescriptor<RestartConnectorRequest, RestartConnectorResponse>
      restartConnectorMethodDescriptor =
          MethodDescriptor.<RestartConnectorRequest, RestartConnectorResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.managedkafka.v1.ManagedKafkaConnect/RestartConnector")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(RestartConnectorRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(RestartConnectorResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // gRPC method descriptor for ManagedKafkaConnect.StopConnector (unary).
  private static final MethodDescriptor<StopConnectorRequest, StopConnectorResponse>
      stopConnectorMethodDescriptor =
          MethodDescriptor.<StopConnectorRequest, StopConnectorResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.managedkafka.v1.ManagedKafkaConnect/StopConnector")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(StopConnectorRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(StopConnectorResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // Mixin: google.cloud.location.Locations/ListLocations (unary).
  private static final MethodDescriptor<ListLocationsRequest, ListLocationsResponse>
      listLocationsMethodDescriptor =
          MethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.location.Locations/ListLocations")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListLocationsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListLocationsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // Mixin: google.cloud.location.Locations/GetLocation (unary).
  private static final MethodDescriptor<GetLocationRequest, Location> getLocationMethodDescriptor =
      MethodDescriptor.<GetLocationRequest, Location>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.location.Locations/GetLocation")
          .setRequestMarshaller(ProtoUtils.marshaller(GetLocationRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Location.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  // One callable per RPC, all constructed once in the constructor below.
  private final UnaryCallable<ListConnectClustersRequest, ListConnectClustersResponse>
      listConnectClustersCallable;
  private final UnaryCallable<ListConnectClustersRequest, ListConnectClustersPagedResponse>
      listConnectClustersPagedCallable;
  private final UnaryCallable<GetConnectClusterRequest, ConnectCluster> getConnectClusterCallable;
  private final UnaryCallable<CreateConnectClusterRequest, Operation> createConnectClusterCallable;
  private final OperationCallable<CreateConnectClusterRequest, ConnectCluster, OperationMetadata>
      createConnectClusterOperationCallable;
  private final UnaryCallable<UpdateConnectClusterRequest, Operation> updateConnectClusterCallable;
  private final OperationCallable<UpdateConnectClusterRequest, ConnectCluster, OperationMetadata>
      updateConnectClusterOperationCallable;
  private final UnaryCallable<DeleteConnectClusterRequest, Operation> deleteConnectClusterCallable;
  private final OperationCallable<DeleteConnectClusterRequest, Empty, OperationMetadata>
      deleteConnectClusterOperationCallable;
  private final UnaryCallable<ListConnectorsRequest, ListConnectorsResponse> listConnectorsCallable;
  private final UnaryCallable<ListConnectorsRequest, ListConnectorsPagedResponse>
      listConnectorsPagedCallable;
  private final UnaryCallable<GetConnectorRequest, Connector> getConnectorCallable;
  private final UnaryCallable<CreateConnectorRequest, Connector> createConnectorCallable;
  private final UnaryCallable<UpdateConnectorRequest, Connector> updateConnectorCallable;
  private final UnaryCallable<DeleteConnectorRequest, Empty> deleteConnectorCallable;
  private final UnaryCallable<PauseConnectorRequest, PauseConnectorResponse> pauseConnectorCallable;
  private final UnaryCallable<ResumeConnectorRequest, ResumeConnectorResponse>
      resumeConnectorCallable;
  private final UnaryCallable<RestartConnectorRequest, RestartConnectorResponse>
      restartConnectorCallable;
  private final UnaryCallable<StopConnectorRequest, StopConnectorResponse> stopConnectorCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable;
  private final UnaryCallable<GetLocationRequest, Location> getLocationCallable;

  // Aggregates the client context's closeable resources for the lifecycle methods below.
  private final BackgroundResource backgroundResources;
  // Stub used by the *OperationCallable wrappers to poll long-running operations.
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;
public static final GrpcManagedKafkaConnectStub create(ManagedKafkaConnectStubSettings settings)
throws IOException {
return new GrpcManagedKafkaConnectStub(settings, ClientContext.create(settings));
}
public static final GrpcManagedKafkaConnectStub create(ClientContext clientContext)
throws IOException {
return new GrpcManagedKafkaConnectStub(
ManagedKafkaConnectStubSettings.newBuilder().build(), clientContext);
}
public static final GrpcManagedKafkaConnectStub create(
ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
return new GrpcManagedKafkaConnectStub(
ManagedKafkaConnectStubSettings.newBuilder().build(), clientContext, callableFactory);
}
  /**
   * Constructs an instance of GrpcManagedKafkaConnectStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcManagedKafkaConnectStub(
      ManagedKafkaConnectStubSettings settings, ClientContext clientContext) throws IOException {
    // Delegate to the full constructor with the default gRPC callable factory.
    this(settings, clientContext, new GrpcManagedKafkaConnectCallableFactory());
  }
  /**
   * Constructs an instance of GrpcManagedKafkaConnectStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcManagedKafkaConnectStub(
      ManagedKafkaConnectStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);

    // Step 1: bind each static method descriptor to a params extractor that copies the
    // request's resource field(s) into routing headers (e.g. "parent", "name").
    GrpcCallSettings<ListConnectClustersRequest, ListConnectClustersResponse>
        listConnectClustersTransportSettings =
            GrpcCallSettings.<ListConnectClustersRequest, ListConnectClustersResponse>newBuilder()
                .setMethodDescriptor(listConnectClustersMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<GetConnectClusterRequest, ConnectCluster> getConnectClusterTransportSettings =
        GrpcCallSettings.<GetConnectClusterRequest, ConnectCluster>newBuilder()
            .setMethodDescriptor(getConnectClusterMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<CreateConnectClusterRequest, Operation> createConnectClusterTransportSettings =
        GrpcCallSettings.<CreateConnectClusterRequest, Operation>newBuilder()
            .setMethodDescriptor(createConnectClusterMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<UpdateConnectClusterRequest, Operation> updateConnectClusterTransportSettings =
        GrpcCallSettings.<UpdateConnectClusterRequest, Operation>newBuilder()
            .setMethodDescriptor(updateConnectClusterMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  // Routing key is the nested resource name for update RPCs.
                  builder.add(
                      "connect_cluster.name",
                      String.valueOf(request.getConnectCluster().getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<DeleteConnectClusterRequest, Operation> deleteConnectClusterTransportSettings =
        GrpcCallSettings.<DeleteConnectClusterRequest, Operation>newBuilder()
            .setMethodDescriptor(deleteConnectClusterMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListConnectorsRequest, ListConnectorsResponse>
        listConnectorsTransportSettings =
            GrpcCallSettings.<ListConnectorsRequest, ListConnectorsResponse>newBuilder()
                .setMethodDescriptor(listConnectorsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<GetConnectorRequest, Connector> getConnectorTransportSettings =
        GrpcCallSettings.<GetConnectorRequest, Connector>newBuilder()
            .setMethodDescriptor(getConnectorMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<CreateConnectorRequest, Connector> createConnectorTransportSettings =
        GrpcCallSettings.<CreateConnectorRequest, Connector>newBuilder()
            .setMethodDescriptor(createConnectorMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<UpdateConnectorRequest, Connector> updateConnectorTransportSettings =
        GrpcCallSettings.<UpdateConnectorRequest, Connector>newBuilder()
            .setMethodDescriptor(updateConnectorMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("connector.name", String.valueOf(request.getConnector().getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<DeleteConnectorRequest, Empty> deleteConnectorTransportSettings =
        GrpcCallSettings.<DeleteConnectorRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteConnectorMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<PauseConnectorRequest, PauseConnectorResponse>
        pauseConnectorTransportSettings =
            GrpcCallSettings.<PauseConnectorRequest, PauseConnectorResponse>newBuilder()
                .setMethodDescriptor(pauseConnectorMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<ResumeConnectorRequest, ResumeConnectorResponse>
        resumeConnectorTransportSettings =
            GrpcCallSettings.<ResumeConnectorRequest, ResumeConnectorResponse>newBuilder()
                .setMethodDescriptor(resumeConnectorMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<RestartConnectorRequest, RestartConnectorResponse>
        restartConnectorTransportSettings =
            GrpcCallSettings.<RestartConnectorRequest, RestartConnectorResponse>newBuilder()
                .setMethodDescriptor(restartConnectorMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<StopConnectorRequest, StopConnectorResponse> stopConnectorTransportSettings =
        GrpcCallSettings.<StopConnectorRequest, StopConnectorResponse>newBuilder()
            .setMethodDescriptor(stopConnectorMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListLocationsRequest, ListLocationsResponse> listLocationsTransportSettings =
        GrpcCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder()
            .setMethodDescriptor(listLocationsMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetLocationRequest, Location> getLocationTransportSettings =
        GrpcCallSettings.<GetLocationRequest, Location>newBuilder()
            .setMethodDescriptor(getLocationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();

    // Step 2: combine transport settings with the per-method retry/timeout settings into
    // the final callables. LRO methods additionally get an OperationCallable that polls
    // through operationsStub.
    this.listConnectClustersCallable =
        callableFactory.createUnaryCallable(
            listConnectClustersTransportSettings,
            settings.listConnectClustersSettings(),
            clientContext);
    this.listConnectClustersPagedCallable =
        callableFactory.createPagedCallable(
            listConnectClustersTransportSettings,
            settings.listConnectClustersSettings(),
            clientContext);
    this.getConnectClusterCallable =
        callableFactory.createUnaryCallable(
            getConnectClusterTransportSettings,
            settings.getConnectClusterSettings(),
            clientContext);
    this.createConnectClusterCallable =
        callableFactory.createUnaryCallable(
            createConnectClusterTransportSettings,
            settings.createConnectClusterSettings(),
            clientContext);
    this.createConnectClusterOperationCallable =
        callableFactory.createOperationCallable(
            createConnectClusterTransportSettings,
            settings.createConnectClusterOperationSettings(),
            clientContext,
            operationsStub);
    this.updateConnectClusterCallable =
        callableFactory.createUnaryCallable(
            updateConnectClusterTransportSettings,
            settings.updateConnectClusterSettings(),
            clientContext);
    this.updateConnectClusterOperationCallable =
        callableFactory.createOperationCallable(
            updateConnectClusterTransportSettings,
            settings.updateConnectClusterOperationSettings(),
            clientContext,
            operationsStub);
    this.deleteConnectClusterCallable =
        callableFactory.createUnaryCallable(
            deleteConnectClusterTransportSettings,
            settings.deleteConnectClusterSettings(),
            clientContext);
    this.deleteConnectClusterOperationCallable =
        callableFactory.createOperationCallable(
            deleteConnectClusterTransportSettings,
            settings.deleteConnectClusterOperationSettings(),
            clientContext,
            operationsStub);
    this.listConnectorsCallable =
        callableFactory.createUnaryCallable(
            listConnectorsTransportSettings, settings.listConnectorsSettings(), clientContext);
    this.listConnectorsPagedCallable =
        callableFactory.createPagedCallable(
            listConnectorsTransportSettings, settings.listConnectorsSettings(), clientContext);
    this.getConnectorCallable =
        callableFactory.createUnaryCallable(
            getConnectorTransportSettings, settings.getConnectorSettings(), clientContext);
    this.createConnectorCallable =
        callableFactory.createUnaryCallable(
            createConnectorTransportSettings, settings.createConnectorSettings(), clientContext);
    this.updateConnectorCallable =
        callableFactory.createUnaryCallable(
            updateConnectorTransportSettings, settings.updateConnectorSettings(), clientContext);
    this.deleteConnectorCallable =
        callableFactory.createUnaryCallable(
            deleteConnectorTransportSettings, settings.deleteConnectorSettings(), clientContext);
    this.pauseConnectorCallable =
        callableFactory.createUnaryCallable(
            pauseConnectorTransportSettings, settings.pauseConnectorSettings(), clientContext);
    this.resumeConnectorCallable =
        callableFactory.createUnaryCallable(
            resumeConnectorTransportSettings, settings.resumeConnectorSettings(), clientContext);
    this.restartConnectorCallable =
        callableFactory.createUnaryCallable(
            restartConnectorTransportSettings, settings.restartConnectorSettings(), clientContext);
    this.stopConnectorCallable =
        callableFactory.createUnaryCallable(
            stopConnectorTransportSettings, settings.stopConnectorSettings(), clientContext);
    this.listLocationsCallable =
        callableFactory.createUnaryCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.listLocationsPagedCallable =
        callableFactory.createPagedCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.getLocationCallable =
        callableFactory.createUnaryCallable(
            getLocationTransportSettings, settings.getLocationSettings(), clientContext);

    // Step 3: collect all closeable resources from the context for the lifecycle methods.
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
  /** Returns the stub used to poll long-running operations started by this client. */
  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }
  // Pass-through accessors for the callables wired up in the constructor.
  @Override
  public UnaryCallable<ListConnectClustersRequest, ListConnectClustersResponse>
      listConnectClustersCallable() {
    return listConnectClustersCallable;
  }

  @Override
  public UnaryCallable<ListConnectClustersRequest, ListConnectClustersPagedResponse>
      listConnectClustersPagedCallable() {
    return listConnectClustersPagedCallable;
  }

  @Override
  public UnaryCallable<GetConnectClusterRequest, ConnectCluster> getConnectClusterCallable() {
    return getConnectClusterCallable;
  }

  @Override
  public UnaryCallable<CreateConnectClusterRequest, Operation> createConnectClusterCallable() {
    return createConnectClusterCallable;
  }

  @Override
  public OperationCallable<CreateConnectClusterRequest, ConnectCluster, OperationMetadata>
      createConnectClusterOperationCallable() {
    return createConnectClusterOperationCallable;
  }

  @Override
  public UnaryCallable<UpdateConnectClusterRequest, Operation> updateConnectClusterCallable() {
    return updateConnectClusterCallable;
  }

  @Override
  public OperationCallable<UpdateConnectClusterRequest, ConnectCluster, OperationMetadata>
      updateConnectClusterOperationCallable() {
    return updateConnectClusterOperationCallable;
  }

  @Override
  public UnaryCallable<DeleteConnectClusterRequest, Operation> deleteConnectClusterCallable() {
    return deleteConnectClusterCallable;
  }

  @Override
  public OperationCallable<DeleteConnectClusterRequest, Empty, OperationMetadata>
      deleteConnectClusterOperationCallable() {
    return deleteConnectClusterOperationCallable;
  }

  @Override
  public UnaryCallable<ListConnectorsRequest, ListConnectorsResponse> listConnectorsCallable() {
    return listConnectorsCallable;
  }

  @Override
  public UnaryCallable<ListConnectorsRequest, ListConnectorsPagedResponse>
      listConnectorsPagedCallable() {
    return listConnectorsPagedCallable;
  }

  @Override
  public UnaryCallable<GetConnectorRequest, Connector> getConnectorCallable() {
    return getConnectorCallable;
  }

  @Override
  public UnaryCallable<CreateConnectorRequest, Connector> createConnectorCallable() {
    return createConnectorCallable;
  }

  @Override
  public UnaryCallable<UpdateConnectorRequest, Connector> updateConnectorCallable() {
    return updateConnectorCallable;
  }

  @Override
  public UnaryCallable<DeleteConnectorRequest, Empty> deleteConnectorCallable() {
    return deleteConnectorCallable;
  }

  @Override
  public UnaryCallable<PauseConnectorRequest, PauseConnectorResponse> pauseConnectorCallable() {
    return pauseConnectorCallable;
  }

  @Override
  public UnaryCallable<ResumeConnectorRequest, ResumeConnectorResponse> resumeConnectorCallable() {
    return resumeConnectorCallable;
  }

  @Override
  public UnaryCallable<RestartConnectorRequest, RestartConnectorResponse>
      restartConnectorCallable() {
    return restartConnectorCallable;
  }

  @Override
  public UnaryCallable<StopConnectorRequest, StopConnectorResponse> stopConnectorCallable() {
    return stopConnectorCallable;
  }

  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
    return listLocationsCallable;
  }

  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable() {
    return listLocationsPagedCallable;
  }

  @Override
  public UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
    return getLocationCallable;
  }
  // Lifecycle management is delegated wholesale to the aggregated background resources.
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e; // unchecked failures propagate unchanged
    } catch (Exception e) {
      // Checked exceptions are wrapped so close() stays compatible with AutoCloseable usage.
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.resourcemanager.v3.stub;
import static com.google.cloud.resourcemanager.v3.TagKeysClient.ListTagKeysPagedResponse;
import com.google.api.HttpRule;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.httpjson.longrunning.stub.HttpJsonOperationsStub;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.resourcemanager.v3.CreateTagKeyMetadata;
import com.google.cloud.resourcemanager.v3.CreateTagKeyRequest;
import com.google.cloud.resourcemanager.v3.DeleteTagKeyMetadata;
import com.google.cloud.resourcemanager.v3.DeleteTagKeyRequest;
import com.google.cloud.resourcemanager.v3.GetNamespacedTagKeyRequest;
import com.google.cloud.resourcemanager.v3.GetTagKeyRequest;
import com.google.cloud.resourcemanager.v3.ListTagKeysRequest;
import com.google.cloud.resourcemanager.v3.ListTagKeysResponse;
import com.google.cloud.resourcemanager.v3.TagKey;
import com.google.cloud.resourcemanager.v3.UpdateTagKeyMetadata;
import com.google.cloud.resourcemanager.v3.UpdateTagKeyRequest;
import com.google.common.collect.ImmutableMap;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the TagKeys service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class HttpJsonTagKeysStub extends TagKeysStub {
private static final TypeRegistry typeRegistry =
TypeRegistry.newBuilder()
.add(TagKey.getDescriptor())
.add(UpdateTagKeyMetadata.getDescriptor())
.add(CreateTagKeyMetadata.getDescriptor())
.add(DeleteTagKeyMetadata.getDescriptor())
.build();
private static final ApiMethodDescriptor<ListTagKeysRequest, ListTagKeysResponse>
listTagKeysMethodDescriptor =
ApiMethodDescriptor.<ListTagKeysRequest, ListTagKeysResponse>newBuilder()
.setFullMethodName("google.cloud.resourcemanager.v3.TagKeys/ListTagKeys")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<ListTagKeysRequest>newBuilder()
.setPath(
"/v3/tagKeys",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<ListTagKeysRequest> serializer =
ProtoRestSerializer.create();
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<ListTagKeysRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "pageSize", request.getPageSize());
serializer.putQueryParam(fields, "pageToken", request.getPageToken());
serializer.putQueryParam(fields, "parent", request.getParent());
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<ListTagKeysResponse>newBuilder()
.setDefaultInstance(ListTagKeysResponse.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<GetTagKeyRequest, TagKey> getTagKeyMethodDescriptor =
ApiMethodDescriptor.<GetTagKeyRequest, TagKey>newBuilder()
.setFullMethodName("google.cloud.resourcemanager.v3.TagKeys/GetTagKey")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<GetTagKeyRequest>newBuilder()
.setPath(
"/v3/{name=tagKeys/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<GetTagKeyRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<GetTagKeyRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<TagKey>newBuilder()
.setDefaultInstance(TagKey.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<GetNamespacedTagKeyRequest, TagKey>
getNamespacedTagKeyMethodDescriptor =
ApiMethodDescriptor.<GetNamespacedTagKeyRequest, TagKey>newBuilder()
.setFullMethodName("google.cloud.resourcemanager.v3.TagKeys/GetNamespacedTagKey")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<GetNamespacedTagKeyRequest>newBuilder()
.setPath(
"/v3/tagKeys/namespaced",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<GetNamespacedTagKeyRequest> serializer =
ProtoRestSerializer.create();
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<GetNamespacedTagKeyRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "name", request.getName());
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<TagKey>newBuilder()
.setDefaultInstance(TagKey.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<CreateTagKeyRequest, Operation>
createTagKeyMethodDescriptor =
ApiMethodDescriptor.<CreateTagKeyRequest, Operation>newBuilder()
.setFullMethodName("google.cloud.resourcemanager.v3.TagKeys/CreateTagKey")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<CreateTagKeyRequest>newBuilder()
.setPath(
"/v3/tagKeys",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<CreateTagKeyRequest> serializer =
ProtoRestSerializer.create();
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<CreateTagKeyRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(
fields, "validateOnly", request.getValidateOnly());
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody("tagKey", request.getTagKey(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Operation>newBuilder()
.setDefaultInstance(Operation.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.setOperationSnapshotFactory(
(CreateTagKeyRequest request, Operation response) ->
HttpJsonOperationSnapshot.create(response))
.build();
private static final ApiMethodDescriptor<UpdateTagKeyRequest, Operation>
updateTagKeyMethodDescriptor =
ApiMethodDescriptor.<UpdateTagKeyRequest, Operation>newBuilder()
.setFullMethodName("google.cloud.resourcemanager.v3.TagKeys/UpdateTagKey")
.setHttpMethod("PATCH")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<UpdateTagKeyRequest>newBuilder()
.setPath(
"/v3/{tagKey.name=tagKeys/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<UpdateTagKeyRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(
fields, "tagKey.name", request.getTagKey().getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<UpdateTagKeyRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "updateMask", request.getUpdateMask());
serializer.putQueryParam(
fields, "validateOnly", request.getValidateOnly());
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody("tagKey", request.getTagKey(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Operation>newBuilder()
.setDefaultInstance(Operation.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.setOperationSnapshotFactory(
(UpdateTagKeyRequest request, Operation response) ->
HttpJsonOperationSnapshot.create(response))
.build();
private static final ApiMethodDescriptor<DeleteTagKeyRequest, Operation>
deleteTagKeyMethodDescriptor =
ApiMethodDescriptor.<DeleteTagKeyRequest, Operation>newBuilder()
.setFullMethodName("google.cloud.resourcemanager.v3.TagKeys/DeleteTagKey")
.setHttpMethod("DELETE")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<DeleteTagKeyRequest>newBuilder()
.setPath(
"/v3/{name=tagKeys/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<DeleteTagKeyRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<DeleteTagKeyRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "etag", request.getEtag());
serializer.putQueryParam(
fields, "validateOnly", request.getValidateOnly());
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<Operation>newBuilder()
.setDefaultInstance(Operation.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.setOperationSnapshotFactory(
(DeleteTagKeyRequest request, Operation response) ->
HttpJsonOperationSnapshot.create(response))
.build();
// REST descriptor for TagKeys/GetIamPolicy: POST /v3/{resource=tagKeys/*}:getIamPolicy.
// The whole request (minus the path-bound "resource" field) is sent as the JSON body.
private static final ApiMethodDescriptor<GetIamPolicyRequest, Policy>
    getIamPolicyMethodDescriptor =
        ApiMethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder()
            .setFullMethodName("google.cloud.resourcemanager.v3.TagKeys/GetIamPolicy")
            .setHttpMethod("POST")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<GetIamPolicyRequest>newBuilder()
                    .setPath(
                        "/v3/{resource=tagKeys/*}:getIamPolicy",
                        request -> {
                          // "resource" is bound into the URL path.
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<GetIamPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "resource", request.getResource());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<GetIamPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(
                        request ->
                            // "resource" travels in the path, so it is cleared from the body.
                            ProtoRestSerializer.create()
                                .toBody("*", request.toBuilder().clearResource().build(), true))
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<Policy>newBuilder()
                    .setDefaultInstance(Policy.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// REST descriptor for TagKeys/SetIamPolicy: POST /v3/{resource=tagKeys/*}:setIamPolicy.
// Mirrors getIamPolicy: "resource" in the path, the remainder of the request as JSON body.
private static final ApiMethodDescriptor<SetIamPolicyRequest, Policy>
    setIamPolicyMethodDescriptor =
        ApiMethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder()
            .setFullMethodName("google.cloud.resourcemanager.v3.TagKeys/SetIamPolicy")
            .setHttpMethod("POST")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<SetIamPolicyRequest>newBuilder()
                    .setPath(
                        "/v3/{resource=tagKeys/*}:setIamPolicy",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<SetIamPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "resource", request.getResource());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<SetIamPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(
                        request ->
                            // "resource" travels in the path, so it is cleared from the body.
                            ProtoRestSerializer.create()
                                .toBody("*", request.toBuilder().clearResource().build(), true))
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<Policy>newBuilder()
                    .setDefaultInstance(Policy.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// REST descriptor for TagKeys/TestIamPermissions:
// POST /v3/{resource=tagKeys/*}:testIamPermissions, same path/body split as the IAM calls above.
private static final ApiMethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse>
    testIamPermissionsMethodDescriptor =
        ApiMethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
            .setFullMethodName("google.cloud.resourcemanager.v3.TagKeys/TestIamPermissions")
            .setHttpMethod("POST")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<TestIamPermissionsRequest>newBuilder()
                    .setPath(
                        "/v3/{resource=tagKeys/*}:testIamPermissions",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<TestIamPermissionsRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "resource", request.getResource());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<TestIamPermissionsRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(
                        request ->
                            // "resource" travels in the path, so it is cleared from the body.
                            ProtoRestSerializer.create()
                                .toBody("*", request.toBuilder().clearResource().build(), true))
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<TestIamPermissionsResponse>newBuilder()
                    .setDefaultInstance(TestIamPermissionsResponse.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// One callable per RPC; all are created in the constructor from the method descriptors
// above combined with the per-method settings from TagKeysStubSettings.
private final UnaryCallable<ListTagKeysRequest, ListTagKeysResponse> listTagKeysCallable;
private final UnaryCallable<ListTagKeysRequest, ListTagKeysPagedResponse>
    listTagKeysPagedCallable;
private final UnaryCallable<GetTagKeyRequest, TagKey> getTagKeyCallable;
private final UnaryCallable<GetNamespacedTagKeyRequest, TagKey> getNamespacedTagKeyCallable;
private final UnaryCallable<CreateTagKeyRequest, Operation> createTagKeyCallable;
private final OperationCallable<CreateTagKeyRequest, TagKey, CreateTagKeyMetadata>
    createTagKeyOperationCallable;
private final UnaryCallable<UpdateTagKeyRequest, Operation> updateTagKeyCallable;
private final OperationCallable<UpdateTagKeyRequest, TagKey, UpdateTagKeyMetadata>
    updateTagKeyOperationCallable;
private final UnaryCallable<DeleteTagKeyRequest, Operation> deleteTagKeyCallable;
private final OperationCallable<DeleteTagKeyRequest, TagKey, DeleteTagKeyMetadata>
    deleteTagKeyOperationCallable;
private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable;
private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable;
private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
    testIamPermissionsCallable;
// Aggregated view of everything that must be shut down together with this stub.
private final BackgroundResource backgroundResources;
// Stub used by the OperationCallables to poll long-running operations over HTTP/JSON.
private final HttpJsonOperationsStub httpJsonOperationsStub;
private final HttpJsonStubCallableFactory callableFactory;
/** Creates a stub from the given settings, with a ClientContext built from those settings. */
public static final HttpJsonTagKeysStub create(TagKeysStubSettings settings) throws IOException {
  return new HttpJsonTagKeysStub(settings, ClientContext.create(settings));
}

/** Creates a stub with default HTTP/JSON settings bound to the given client context. */
public static final HttpJsonTagKeysStub create(ClientContext clientContext) throws IOException {
  return new HttpJsonTagKeysStub(TagKeysStubSettings.newHttpJsonBuilder().build(), clientContext);
}

/** Creates a stub with default settings, the given context, and a custom callable factory. */
public static final HttpJsonTagKeysStub create(
    ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
  return new HttpJsonTagKeysStub(
      TagKeysStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory);
}
/**
 * Constructs an instance of HttpJsonTagKeysStub, using the given settings. This is protected so
 * that it is easy to make a subclass, but otherwise, the static factory methods should be
 * preferred.
 */
protected HttpJsonTagKeysStub(TagKeysStubSettings settings, ClientContext clientContext)
    throws IOException {
  // Delegates to the main constructor with the default HTTP/JSON callable factory.
  this(settings, clientContext, new HttpJsonTagKeysCallableFactory());
}
/**
 * Constructs an instance of HttpJsonTagKeysStub, using the given settings. This is protected so
 * that it is easy to make a subclass, but otherwise, the static factory methods should be
 * preferred.
 */
protected HttpJsonTagKeysStub(
    TagKeysStubSettings settings,
    ClientContext clientContext,
    HttpJsonStubCallableFactory callableFactory)
    throws IOException {
  this.callableFactory = callableFactory;
  // Operations stub used to poll long-running operations; GetOperation is routed to
  // GET /v3/{name=operations/**}.
  this.httpJsonOperationsStub =
      HttpJsonOperationsStub.create(
          clientContext,
          callableFactory,
          typeRegistry,
          ImmutableMap.<String, HttpRule>builder()
              .put(
                  "google.longrunning.Operations.GetOperation",
                  HttpRule.newBuilder().setGet("/v3/{name=operations/**}").build())
              .build());
  // Transport settings: each pairs a method descriptor with the shared type registry and,
  // where the RPC has routing headers, a params extractor that populates them.
  HttpJsonCallSettings<ListTagKeysRequest, ListTagKeysResponse> listTagKeysTransportSettings =
      HttpJsonCallSettings.<ListTagKeysRequest, ListTagKeysResponse>newBuilder()
          .setMethodDescriptor(listTagKeysMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .build();
  HttpJsonCallSettings<GetTagKeyRequest, TagKey> getTagKeyTransportSettings =
      HttpJsonCallSettings.<GetTagKeyRequest, TagKey>newBuilder()
          .setMethodDescriptor(getTagKeyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("name", String.valueOf(request.getName()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<GetNamespacedTagKeyRequest, TagKey> getNamespacedTagKeyTransportSettings =
      HttpJsonCallSettings.<GetNamespacedTagKeyRequest, TagKey>newBuilder()
          .setMethodDescriptor(getNamespacedTagKeyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .build();
  HttpJsonCallSettings<CreateTagKeyRequest, Operation> createTagKeyTransportSettings =
      HttpJsonCallSettings.<CreateTagKeyRequest, Operation>newBuilder()
          .setMethodDescriptor(createTagKeyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .build();
  HttpJsonCallSettings<UpdateTagKeyRequest, Operation> updateTagKeyTransportSettings =
      HttpJsonCallSettings.<UpdateTagKeyRequest, Operation>newBuilder()
          .setMethodDescriptor(updateTagKeyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("tag_key.name", String.valueOf(request.getTagKey().getName()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<DeleteTagKeyRequest, Operation> deleteTagKeyTransportSettings =
      HttpJsonCallSettings.<DeleteTagKeyRequest, Operation>newBuilder()
          .setMethodDescriptor(deleteTagKeyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("name", String.valueOf(request.getName()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings =
      HttpJsonCallSettings.<GetIamPolicyRequest, Policy>newBuilder()
          .setMethodDescriptor(getIamPolicyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("resource", String.valueOf(request.getResource()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings =
      HttpJsonCallSettings.<SetIamPolicyRequest, Policy>newBuilder()
          .setMethodDescriptor(setIamPolicyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("resource", String.valueOf(request.getResource()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsTransportSettings =
          HttpJsonCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
              .setMethodDescriptor(testIamPermissionsMethodDescriptor)
              .setTypeRegistry(typeRegistry)
              .setParamsExtractor(
                  request -> {
                    RequestParamsBuilder builder = RequestParamsBuilder.create();
                    builder.add("resource", String.valueOf(request.getResource()));
                    return builder.build();
                  })
              .build();
  // Wire the final callables: transport settings + per-method retry/timeout settings.
  // LRO methods additionally get an OperationCallable backed by httpJsonOperationsStub.
  this.listTagKeysCallable =
      callableFactory.createUnaryCallable(
          listTagKeysTransportSettings, settings.listTagKeysSettings(), clientContext);
  this.listTagKeysPagedCallable =
      callableFactory.createPagedCallable(
          listTagKeysTransportSettings, settings.listTagKeysSettings(), clientContext);
  this.getTagKeyCallable =
      callableFactory.createUnaryCallable(
          getTagKeyTransportSettings, settings.getTagKeySettings(), clientContext);
  this.getNamespacedTagKeyCallable =
      callableFactory.createUnaryCallable(
          getNamespacedTagKeyTransportSettings,
          settings.getNamespacedTagKeySettings(),
          clientContext);
  this.createTagKeyCallable =
      callableFactory.createUnaryCallable(
          createTagKeyTransportSettings, settings.createTagKeySettings(), clientContext);
  this.createTagKeyOperationCallable =
      callableFactory.createOperationCallable(
          createTagKeyTransportSettings,
          settings.createTagKeyOperationSettings(),
          clientContext,
          httpJsonOperationsStub);
  this.updateTagKeyCallable =
      callableFactory.createUnaryCallable(
          updateTagKeyTransportSettings, settings.updateTagKeySettings(), clientContext);
  this.updateTagKeyOperationCallable =
      callableFactory.createOperationCallable(
          updateTagKeyTransportSettings,
          settings.updateTagKeyOperationSettings(),
          clientContext,
          httpJsonOperationsStub);
  this.deleteTagKeyCallable =
      callableFactory.createUnaryCallable(
          deleteTagKeyTransportSettings, settings.deleteTagKeySettings(), clientContext);
  this.deleteTagKeyOperationCallable =
      callableFactory.createOperationCallable(
          deleteTagKeyTransportSettings,
          settings.deleteTagKeyOperationSettings(),
          clientContext,
          httpJsonOperationsStub);
  this.getIamPolicyCallable =
      callableFactory.createUnaryCallable(
          getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext);
  this.setIamPolicyCallable =
      callableFactory.createUnaryCallable(
          setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext);
  this.testIamPermissionsCallable =
      callableFactory.createUnaryCallable(
          testIamPermissionsTransportSettings,
          settings.testIamPermissionsSettings(),
          clientContext);
  // Collect everything that needs lifecycle management (close/shutdown) in one place.
  this.backgroundResources =
      new BackgroundResourceAggregation(clientContext.getBackgroundResources());
}
/** Returns the REST method descriptors for every RPC this stub exposes. */
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
  ArrayList<ApiMethodDescriptor> descriptors = new ArrayList<>();
  descriptors.add(listTagKeysMethodDescriptor);
  descriptors.add(getTagKeyMethodDescriptor);
  descriptors.add(getNamespacedTagKeyMethodDescriptor);
  descriptors.add(createTagKeyMethodDescriptor);
  descriptors.add(updateTagKeyMethodDescriptor);
  descriptors.add(deleteTagKeyMethodDescriptor);
  descriptors.add(getIamPolicyMethodDescriptor);
  descriptors.add(setIamPolicyMethodDescriptor);
  descriptors.add(testIamPermissionsMethodDescriptor);
  return descriptors;
}
/** Returns the stub used to poll long-running operations over HTTP/JSON. */
public HttpJsonOperationsStub getHttpJsonOperationsStub() {
  return httpJsonOperationsStub;
}
// Plain accessors: each returns the corresponding callable built in the constructor.
@Override
public UnaryCallable<ListTagKeysRequest, ListTagKeysResponse> listTagKeysCallable() {
  return listTagKeysCallable;
}

@Override
public UnaryCallable<ListTagKeysRequest, ListTagKeysPagedResponse> listTagKeysPagedCallable() {
  return listTagKeysPagedCallable;
}

@Override
public UnaryCallable<GetTagKeyRequest, TagKey> getTagKeyCallable() {
  return getTagKeyCallable;
}

@Override
public UnaryCallable<GetNamespacedTagKeyRequest, TagKey> getNamespacedTagKeyCallable() {
  return getNamespacedTagKeyCallable;
}

@Override
public UnaryCallable<CreateTagKeyRequest, Operation> createTagKeyCallable() {
  return createTagKeyCallable;
}

@Override
public OperationCallable<CreateTagKeyRequest, TagKey, CreateTagKeyMetadata>
    createTagKeyOperationCallable() {
  return createTagKeyOperationCallable;
}

@Override
public UnaryCallable<UpdateTagKeyRequest, Operation> updateTagKeyCallable() {
  return updateTagKeyCallable;
}

@Override
public OperationCallable<UpdateTagKeyRequest, TagKey, UpdateTagKeyMetadata>
    updateTagKeyOperationCallable() {
  return updateTagKeyOperationCallable;
}

@Override
public UnaryCallable<DeleteTagKeyRequest, Operation> deleteTagKeyCallable() {
  return deleteTagKeyCallable;
}

@Override
public OperationCallable<DeleteTagKeyRequest, TagKey, DeleteTagKeyMetadata>
    deleteTagKeyOperationCallable() {
  return deleteTagKeyOperationCallable;
}

@Override
public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
  return getIamPolicyCallable;
}

@Override
public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
  return setIamPolicyCallable;
}

@Override
public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
    testIamPermissionsCallable() {
  return testIamPermissionsCallable;
}
/**
 * Closes all aggregated background resources. Runtime exceptions propagate unchanged;
 * checked exceptions are wrapped in an {@link IllegalStateException}.
 */
@Override
public final void close() {
  try {
    backgroundResources.close();
  } catch (Exception e) {
    // Preserve the original behavior: rethrow unchecked as-is, wrap everything else.
    if (e instanceof RuntimeException) {
      throw (RuntimeException) e;
    }
    throw new IllegalStateException("Failed to close resource", e);
  }
}
// Lifecycle management is delegated wholesale to the aggregated background resources.
@Override
public void shutdown() {
  backgroundResources.shutdown();
}

@Override
public boolean isShutdown() {
  return backgroundResources.isShutdown();
}

@Override
public boolean isTerminated() {
  return backgroundResources.isTerminated();
}

@Override
public void shutdownNow() {
  backgroundResources.shutdownNow();
}

@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
  return backgroundResources.awaitTermination(duration, unit);
}
}
|
google/j2objc | 36,245 | jre_emul/android/platform/libcore/ojluni/src/main/java/java/net/Inet6Address.java | /*
* Copyright (C) 2014 The Android Open Source Project
* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.net;
import com.google.j2objc.annotations.Weak;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamField;
import java.util.Enumeration;
import java.util.Arrays;
import libcore.io.Libcore;
import libcore.io.NetworkOs;
import static libcore.io.OsConstants.*;
/*-[
#include "JreRetainedWith.h"
]-*/
/**
* This class represents an Internet Protocol version 6 (IPv6) address.
* Defined by <a href="http://www.ietf.org/rfc/rfc2373.txt">
* <i>RFC 2373: IP Version 6 Addressing Architecture</i></a>.
*
* <h3> <A NAME="format">Textual representation of IP addresses</a> </h3>
*
* Textual representation of IPv6 address used as input to methods
* takes one of the following forms:
*
* <ol>
* <li><p> <A NAME="lform">The preferred form</a> is x:x:x:x:x:x:x:x,
* where the 'x's are
* the hexadecimal values of the eight 16-bit pieces of the
* address. This is the full form. For example,
*
* <blockquote><table cellpadding=0 cellspacing=0 summary="layout">
* <tr><td>{@code 1080:0:0:0:8:800:200C:417A}<td></tr>
* </table></blockquote>
*
* <p> Note that it is not necessary to write the leading zeros in
* an individual field. However, there must be at least one numeral
* in every field, except as described below.</li>
*
* <li><p> Due to some methods of allocating certain styles of IPv6
* addresses, it will be common for addresses to contain long
* strings of zero bits. In order to make writing addresses
* containing zero bits easier, a special syntax is available to
* compress the zeros. The use of "::" indicates multiple groups
* of 16-bits of zeros. The "::" can only appear once in an address.
* The "::" can also be used to compress the leading and/or trailing
* zeros in an address. For example,
*
* <blockquote><table cellpadding=0 cellspacing=0 summary="layout">
* <tr><td>{@code 1080::8:800:200C:417A}<td></tr>
* </table></blockquote>
*
* <li><p> An alternative form that is sometimes more convenient
* when dealing with a mixed environment of IPv4 and IPv6 nodes is
* x:x:x:x:x:x:d.d.d.d, where the 'x's are the hexadecimal values
* of the six high-order 16-bit pieces of the address, and the 'd's
* are the decimal values of the four low-order 8-bit pieces of the
* standard IPv4 representation address, for example,
*
* <blockquote><table cellpadding=0 cellspacing=0 summary="layout">
* <tr><td>{@code ::FFFF:129.144.52.38}<td></tr>
* <tr><td>{@code ::129.144.52.38}<td></tr>
* </table></blockquote>
*
* <p> where "::FFFF:d.d.d.d" and "::d.d.d.d" are, respectively, the
* general forms of an IPv4-mapped IPv6 address and an
* IPv4-compatible IPv6 address. Note that the IPv4 portion must be
* in the "d.d.d.d" form. The following forms are invalid:
*
* <blockquote><table cellpadding=0 cellspacing=0 summary="layout">
* <tr><td>{@code ::FFFF:d.d.d}<td></tr>
* <tr><td>{@code ::FFFF:d.d}<td></tr>
* <tr><td>{@code ::d.d.d}<td></tr>
* <tr><td>{@code ::d.d}<td></tr>
* </table></blockquote>
*
* <p> The following form:
*
* <blockquote><table cellpadding=0 cellspacing=0 summary="layout">
* <tr><td>{@code ::FFFF:d}<td></tr>
* </table></blockquote>
*
* <p> is valid, however it is an unconventional representation of
* the IPv4-compatible IPv6 address,
*
* <blockquote><table cellpadding=0 cellspacing=0 summary="layout">
* <tr><td>{@code ::255.255.0.d}<td></tr>
* </table></blockquote>
*
* <p> while "::d" corresponds to the general IPv6 address
* "0:0:0:0:0:0:0:d".</li>
* </ol>
*
* <p> For methods that return a textual representation as output
* value, the full form is used. Inet6Address will return the full
* form because it is unambiguous when used in combination with other
* textual data.
*
* <h4> Special IPv6 address </h4>
*
* <blockquote>
* <table cellspacing=2 summary="Description of IPv4-mapped address">
* <tr><th valign=top><i>IPv4-mapped address</i></th>
 *         <td>Of the form ::ffff:w.x.y.z, this IPv6 address is used to
* represent an IPv4 address. It allows the native program to
* use the same address data structure and also the same
* socket when communicating with both IPv4 and IPv6 nodes.
*
* <p>In InetAddress and Inet6Address, it is used for internal
* representation; it has no functional role. Java will never
* return an IPv4-mapped address. These classes can take an
* IPv4-mapped address as input, both in byte array and text
* representation. However, it will be converted into an IPv4
* address.</td></tr>
* </table></blockquote>
*
* <h4><A NAME="scoped">Textual representation of IPv6 scoped addresses</a></h4>
*
* <p> The textual representation of IPv6 addresses as described above can be
* extended to specify IPv6 scoped addresses. This extension to the basic
* addressing architecture is described in [draft-ietf-ipngwg-scoping-arch-04.txt].
*
* <p> Because link-local and site-local addresses are non-global, it is possible
* that different hosts may have the same destination address and may be
* reachable through different interfaces on the same originating system. In
* this case, the originating system is said to be connected to multiple zones
* of the same scope. In order to disambiguate which is the intended destination
* zone, it is possible to append a zone identifier (or <i>scope_id</i>) to an
* IPv6 address.
*
* <p> The general format for specifying the <i>scope_id</i> is the following:
*
* <blockquote><i>IPv6-address</i>%<i>scope_id</i></blockquote>
* <p> The IPv6-address is a literal IPv6 address as described above.
* The <i>scope_id</i> refers to an interface on the local system, and it can be
* specified in two ways.
* <ol><li><i>As a numeric identifier.</i> This must be a positive integer
* that identifies the particular interface and scope as understood by the
* system. Usually, the numeric values can be determined through administration
* tools on the system. Each interface may have multiple values, one for each
* scope. If the scope is unspecified, then the default value used is zero.</li>
* <li><i>As a string.</i> This must be the exact string that is returned by
* {@link java.net.NetworkInterface#getName()} for the particular interface in
* question. When an Inet6Address is created in this way, the numeric scope-id
* is determined at the time the object is created by querying the relevant
* NetworkInterface.</li></ol>
*
* <p> Note also, that the numeric <i>scope_id</i> can be retrieved from
* Inet6Address instances returned from the NetworkInterface class. This can be
* used to find out the current scope ids configured on the system.
* @since 1.4
*/
public final
class Inet6Address extends InetAddress {
// Length in bytes of a raw IPv6 address (128 bits).
final static int INADDRSZ = 16;

// BEGIN Android-removed: Remove special handling for link-local addresses.
/*
 * cached scope_id - for link-local address use only.
 *
private transient int cached_scope_id;  // 0
*/
// END Android-removed: Remove special handling for link-local addresses.

// BEGIN Android-added: Define special-purpose IPv6 address.
/** The IPv6 wildcard address {@code ::} (all 16 bytes zero). @hide */
public static final InetAddress ANY =
        new Inet6Address("::", new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, 0);

/** The IPv6 loopback address {@code ::1}. @hide */
public static final InetAddress LOOPBACK = new Inet6Address("ip6-localhost",
        new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1}, 0);
// END Android-added: Define special-purpose IPv6 address.
// Container for the IPv6-specific state of an Inet6Address: the 16 raw address
// bytes plus the (optional) numeric scope id and/or scoped interface.
/* J2ObjC removed: private */ class Inet6AddressHolder {

    Inet6AddressHolder() {
        ipaddress = new byte[INADDRSZ];
    }

    Inet6AddressHolder(
        byte[] ipaddress, int scope_id, boolean scope_id_set,
        NetworkInterface ifname, boolean scope_ifname_set)
    {
        this.ipaddress = ipaddress;
        this.scope_id = scope_id;
        this.scope_id_set = scope_id_set;
        this.scope_ifname_set = scope_ifname_set;
        this.scope_ifname = ifname;
    }

    /**
     * Holds a 128-bit (16 bytes) IPv6 address.
     */
    byte[] ipaddress;

    /**
     * scope_id. The scope specified when the object is created. If the object
     * is created with an interface name, then the scope_id is not determined
     * until the time it is needed.
     */
    int scope_id; // 0

    /**
     * This will be set to true when the scope_id field contains a valid
     * integer scope_id.
     */
    boolean scope_id_set; // false

    /**
     * scoped interface. scope_id is derived from this as the scope_id of the first
     * address whose scope is the same as this address for the named interface.
     */
    @Weak
    NetworkInterface scope_ifname; // null

    // NetworkInterface has two strong references to InetAddress:
    // 1. NetworkInterface.addrs[] -> InetAddress
    // 2. NetworkInterface.bindings[] -> InterfaceAddress.address -> InetAddress.
    // And this class which is a subclass of InetAddress has a field of type NetworkInterface. To
    // resolve this reference cycle, we can either use the RETAINED_WITH_CHILD_NUM_REFS macro and
    // set NUM_REFS to 2 or cache all the network interfaces in a map with String keys since the
    // number of network interfaces on an iOS device is small.
    /*-[
    RETAINED_WITH_CHILD_NUM_REFS(scope_ifname_, 2)
    ]-*/

    /**
     * set if the object is constructed with a scoped
     * interface instead of a numeric scope id.
     */
    boolean scope_ifname_set; // false;

    // Copies addr into ipaddress; arrays that are not exactly 16 bytes are silently ignored.
    void setAddr(byte addr[]) {
        if (addr.length == INADDRSZ) { // normal IPv6 address
            System.arraycopy(addr, 0, ipaddress, 0, INADDRSZ);
        }
    }

    // Records the address bytes and, when the supplied scope id is positive, stores it.
    void init(byte addr[], int scope_id) {
        setAddr(addr);

        // Android-changed: was >= 0.
        if (scope_id > 0) {
            this.scope_id = scope_id;
            this.scope_id_set = true;
        }
    }

    // Records the address bytes and derives the numeric scope id from the given
    // interface; deriveNumericScope throws UnknownHostException on failure.
    void init(byte addr[], NetworkInterface nif)
        throws UnknownHostException
    {
        setAddr(addr);

        if (nif != null) {
            this.scope_id = deriveNumericScope(ipaddress, nif);
            this.scope_id_set = true;
            this.scope_ifname = nif;
            this.scope_ifname_set = true;
        }
    }

    // Android-removed: getnameinfo returns smarter representations than getHostAddress().
    /*
    String getHostAddress() {
        String s = numericToTextFormat(ipaddress);
        if (scope_ifname != null) { // must check this first
            s = s + "%" + scope_ifname.getName();
        } else if (scope_id_set) {
            s = s + "%" + scope_id;
        }
        return s;
    }
    */

    // Equality is based solely on the 16 address bytes; scope id / interface are ignored.
    public boolean equals(Object o) {
        if (! (o instanceof Inet6AddressHolder)) {
            return false;
        }
        Inet6AddressHolder that = (Inet6AddressHolder)o;

        return Arrays.equals(this.ipaddress, that.ipaddress);
    }

    // Folds the address into four 32-bit components and sums them.
    // NOTE(review): bytes are added without masking (& 0xff), so they are
    // sign-extended — presumably intentional to stay hash-compatible with upstream.
    public int hashCode() {
        if (ipaddress != null) {

            int hash = 0;
            int i=0;
            while (i<INADDRSZ) {
                int j=0;
                int component=0;

                while (j<4 && i<INADDRSZ) {
                    component = (component << 8) + ipaddress[i];
                    j++;
                    i++;
                }
                hash += component;
            }
            return hash;

        } else {
            return 0;
        }
    }

    // True when the first 12 bytes (96 bits) are zero — the IPv4-compatible prefix.
    boolean isIPv4CompatibleAddress() {
        if ((ipaddress[0] == 0x00) && (ipaddress[1] == 0x00) &&
            (ipaddress[2] == 0x00) && (ipaddress[3] == 0x00) &&
            (ipaddress[4] == 0x00) && (ipaddress[5] == 0x00) &&
            (ipaddress[6] == 0x00) && (ipaddress[7] == 0x00) &&
            (ipaddress[8] == 0x00) && (ipaddress[9] == 0x00) &&
            (ipaddress[10] == 0x00) && (ipaddress[11] == 0x00)) {
            return true;
        }
        return false;
    }

    // Multicast: first byte is 0xff.
    boolean isMulticastAddress() {
        return ((ipaddress[0] & 0xff) == 0xff);
    }

    // Wildcard ("::"): every byte is zero.
    boolean isAnyLocalAddress() {
        byte test = 0x00;
        for (int i = 0; i < INADDRSZ; i++) {
            test |= ipaddress[i];
        }
        return (test == 0x00);
    }

    // Loopback ("::1"): first 15 bytes zero, last byte 1.
    boolean isLoopbackAddress() {
        byte test = 0x00;
        for (int i = 0; i < 15; i++) {
            test |= ipaddress[i];
        }
        return (test == 0x00) && (ipaddress[15] == 0x01);
    }

    // Link-local: fe80::/10.
    boolean isLinkLocalAddress() {
        return ((ipaddress[0] & 0xff) == 0xfe
                && (ipaddress[1] & 0xc0) == 0x80);
    }

    // Site-local: fec0::/10.
    boolean isSiteLocalAddress() {
        return ((ipaddress[0] & 0xff) == 0xfe
                && (ipaddress[1] & 0xc0) == 0xc0);
    }

    // Multicast scope checks: low nibble of the second byte is the scope field.
    boolean isMCGlobal() {
        return ((ipaddress[0] & 0xff) == 0xff
                && (ipaddress[1] & 0x0f) == 0x0e);
    }

    boolean isMCNodeLocal() {
        return ((ipaddress[0] & 0xff) == 0xff
                && (ipaddress[1] & 0x0f) == 0x01);
    }

    boolean isMCLinkLocal() {
        return ((ipaddress[0] & 0xff) == 0xff
                && (ipaddress[1] & 0x0f) == 0x02);
    }

    boolean isMCSiteLocal() {
        return ((ipaddress[0] & 0xff) == 0xff
                && (ipaddress[1] & 0x0f) == 0x05);
    }

    boolean isMCOrgLocal() {
        return ((ipaddress[0] & 0xff) == 0xff
                && (ipaddress[1] & 0x0f) == 0x08);
    }
}
// IPv6-specific state; transient, so it is not written by default serialization
// (presumably reconstructed during deserialization — stream handling not shown here).
/* J2ObjC removed: private */ final transient Inet6AddressHolder holder6;

private static final long serialVersionUID = 6880410070516793377L;
// BEGIN Android-removed: Android doesn't need to call native init.
/*
// Perform native initialization
static { init(); }
// END Android-removed: Android doesn't need to call native init.
*/
// Creates an unresolved, all-zero IPv6 address with no hostname.
Inet6Address() {
    super();
    holder.init(null, AF_INET6);
    holder6 = new Inet6AddressHolder();
}
/* checking of value for scope_id should be done by caller
 * scope_id must be >= 0, or -1 to indicate not being set
 */
Inet6Address(String hostName, byte addr[], int scope_id) {
    holder.init(hostName, AF_INET6);
    holder6 = new Inet6AddressHolder();
    // Copies the address bytes and records the scope id (only if positive).
    holder6.init(addr, scope_id);
}
// Creates an address with no scope; equivalent to the NetworkInterface
// constructor with a null interface.
Inet6Address(String hostName, byte addr[]) {
    holder6 = new Inet6AddressHolder();
    try {
        initif (hostName, addr, null);
    } catch (UnknownHostException e) {} /* can't happen if ifname is null */
}
// Creates an address whose scope id is derived from the given interface;
// throws if the interface has no address of matching scope.
Inet6Address (String hostName, byte addr[], NetworkInterface nif)
    throws UnknownHostException
{
    holder6 = new Inet6AddressHolder();
    initif (hostName, addr, nif);
}
// Creates an address scoped by interface name; the name is resolved to a
// NetworkInterface (throws UnknownHostException if no such interface exists).
Inet6Address (String hostName, byte addr[], String ifname)
    throws UnknownHostException
{
    holder6 = new Inet6AddressHolder();
    initstr (hostName, addr, ifname);
}
/**
 * Create an Inet6Address in the exact manner of {@link
 * InetAddress#getByAddress(String,byte[])} except that the IPv6 scope_id is
 * set to the value corresponding to the given interface for the address
 * type specified in {@code addr}. The call will fail with an
 * UnknownHostException if the given interface does not have a numeric
 * scope_id assigned for the given address type (eg. link-local or site-local).
 * See <a href="Inet6Address.html#scoped">here</a> for a description of IPv6
 * scoped addresses.
 *
 * @param host the specified host
 * @param addr the raw IP address in network byte order
 * @param nif an interface this address must be associated with.
 * @return an Inet6Address object created from the raw IP address.
 * @throws UnknownHostException
 *          if IP address is of illegal length, or if the interface does not
 *          have a numeric scope_id assigned for the given address type.
 *
 * @since 1.5
 */
public static Inet6Address getByAddress(String host, byte[] addr,
                                        NetworkInterface nif)
    throws UnknownHostException
{
    // Strip RFC 2732-style brackets from a literal such as "[::1]".
    String hostName = host;
    if (hostName != null && hostName.length() > 0
            && hostName.charAt(0) == '['
            && hostName.charAt(hostName.length() - 1) == ']') {
        hostName = hostName.substring(1, hostName.length() - 1);
    }
    // Reject anything that is not exactly 16 raw bytes.
    if (addr == null || addr.length != Inet6Address.INADDRSZ) {
        throw new UnknownHostException("addr is of illegal length");
    }
    return new Inet6Address(hostName, addr, nif);
}
/**
 * Create an Inet6Address in the exact manner of {@link
 * InetAddress#getByAddress(String,byte[])} except that the IPv6 scope_id is
 * set to the given numeric value. The scope_id is not checked to determine
 * if it corresponds to any interface on the system.
 * See <a href="Inet6Address.html#scoped">here</a> for a description of IPv6
 * scoped addresses.
 *
 * @param host the specified host
 * @param addr the raw IP address in network byte order
 * @param scope_id the numeric scope_id for the address.
 * @return an Inet6Address object created from the raw IP address.
 * @throws UnknownHostException if IP address is of illegal length.
 *
 * @since 1.5
 */
public static Inet6Address getByAddress(String host, byte[] addr,
                                        int scope_id)
    throws UnknownHostException
{
    // Strip RFC 2732-style brackets from a literal such as "[::1]".
    String hostName = host;
    if (hostName != null && hostName.length() > 0
            && hostName.charAt(0) == '['
            && hostName.charAt(hostName.length() - 1) == ']') {
        hostName = hostName.substring(1, hostName.length() - 1);
    }
    // Reject anything that is not exactly 16 raw bytes.
    if (addr == null || addr.length != Inet6Address.INADDRSZ) {
        throw new UnknownHostException("addr is of illegal length");
    }
    return new Inet6Address(hostName, addr, scope_id);
}
/**
 * Initializes this address from a raw address plus a scoped-interface
 * <em>name</em>, resolving the name to a {@link NetworkInterface} first.
 *
 * @param hostName the host name, may be null
 * @param addr the raw 16-byte IPv6 address
 * @param ifname the name of the interface that scopes this address
 * @throws UnknownHostException if the interface does not exist or cannot
 *         be looked up (the underlying SocketException is preserved as
 *         the cause)
 */
private void initstr(String hostName, byte addr[], String ifname)
    throws UnknownHostException
{
    try {
        NetworkInterface nif = NetworkInterface.getByName (ifname);
        if (nif == null) {
            throw new UnknownHostException ("no such interface " + ifname);
        }
        initif (hostName, addr, nif);
    } catch (SocketException e) {
        // Fix: the original message ran the words together
        // ("SocketException throwneth0") and discarded the cause entirely,
        // making lookup failures hard to diagnose.
        UnknownHostException uhe =
            new UnknownHostException("SocketException thrown: " + ifname);
        uhe.initCause(e);
        throw uhe;
    }
}
/**
 * Initializes this address from a raw address plus a resolved scoped
 * interface; sets the address family to AF_INET6 for well-formed
 * 16-byte addresses, -1 otherwise.
 */
private void initif(String hostName, byte addr[], NetworkInterface nif)
    throws UnknownHostException
{
    // holder6 is initialized first; it may reject the (addr, nif) pair.
    holder6.init(addr, nif);
    int family = (addr.length == INADDRSZ) ? AF_INET6 : -1;
    holder.init(hostName, family);
}
/*
 * Check the two IPv6 addresses. Despite the name, this returns FALSE when
 * the two are both non-global address types but NOT the same kind
 * (i.e. one is site-local and the other link-local), and TRUE otherwise —
 * callers treat a true result as "compatible local address type".
 */
private static boolean isDifferentLocalAddressType(
        byte[] thisAddr, byte[] otherAddr) {
    boolean linkLocalMismatch = Inet6Address.isLinkLocalAddress(thisAddr)
            && !Inet6Address.isLinkLocalAddress(otherAddr);
    boolean siteLocalMismatch = Inet6Address.isSiteLocalAddress(thisAddr)
            && !Inet6Address.isSiteLocalAddress(otherAddr);
    return !(linkLocalMismatch || siteLocalMismatch);
}
/*
 * Walk the interface's addresses looking for an IPv6 address whose
 * local-address type is compatible with thisAddr, and return that
 * address's numeric scope_id.
 */
private static int deriveNumericScope (byte[] thisAddr, NetworkInterface ifc) throws UnknownHostException {
    Enumeration<InetAddress> candidates = ifc.getInetAddresses();
    while (candidates.hasMoreElements()) {
        InetAddress candidate = candidates.nextElement();
        if (candidate instanceof Inet6Address) {
            Inet6Address v6 = (Inet6Address) candidate;
            // isDifferentLocalAddressType() == true means "compatible type".
            if (isDifferentLocalAddressType(thisAddr, v6.getAddress())) {
                return v6.getScopeId();
            }
        }
    }
    throw new UnknownHostException ("no scope_id found");
}
/*
 * Resolve the named interface among the local interfaces and derive the
 * numeric scope_id for this address from it; fails if the interface does
 * not exist, cannot be enumerated, or carries no compatible IPv6 address.
 */
private int deriveNumericScope (String ifname) throws UnknownHostException {
    Enumeration<NetworkInterface> interfaces;
    try {
        interfaces = NetworkInterface.getNetworkInterfaces();
    } catch (SocketException e) {
        throw new UnknownHostException ("could not enumerate local network interfaces");
    }
    while (interfaces.hasMoreElements()) {
        NetworkInterface candidate = interfaces.nextElement();
        if (candidate.getName().equals (ifname)) {
            return deriveNumericScope(holder6.ipaddress, candidate);
        }
    }
    throw new UnknownHostException ("No matching address found for interface : " +ifname);
}
/**
 * Serialized form of the address and scope state.
 *
 * @serialField ipaddress byte[]
 * @serialField scope_id int
 * @serialField scope_id_set boolean
 * @serialField scope_ifname_set boolean
 * @serialField ifname String
 */
private static final ObjectStreamField[] serialPersistentFields = {
    new ObjectStreamField("ipaddress", byte[].class),
    new ObjectStreamField("scope_id", int.class),
    new ObjectStreamField("scope_id_set", boolean.class),
    new ObjectStreamField("scope_ifname_set", boolean.class),
    new ObjectStreamField("ifname", String.class)
};
// Offset of the final 'holder6' field; readObject reassigns the field
// reflectively through Unsafe because final fields cannot be written
// directly during deserialization.
private static final long FIELDS_OFFSET;
private static final sun.misc.Unsafe UNSAFE;
static {
    try {
        sun.misc.Unsafe unsafe = sun.misc.Unsafe.getUnsafe();
        FIELDS_OFFSET = unsafe.objectFieldOffset(
            Inet6Address.class.getDeclaredField("holder6"));
        UNSAFE = unsafe;
    } catch (ReflectiveOperationException e) {
        // A missing 'holder6' field would mean a broken runtime build;
        // fail class initialization loudly.
        throw new Error(e);
    }
}
/**
 * Restores the state of this object from the stream, including the scope
 * information, but only if the scoped interface name is valid on this
 * system.
 *
 * @throws InvalidObjectException if the serialized form is malformed:
 *         missing address bytes, wrong address length, or wrong family
 */
private void readObject(ObjectInputStream s)
    throws IOException, ClassNotFoundException {
    NetworkInterface scope_ifname = null;
    // Android-changed: was getClass().getClassLoader() != null.
    // Only the bootstrap-defined Inet6Address may be deserialized; a
    // subclass from another loader could otherwise smuggle in state.
    if (getClass().getClassLoader() != Class.class.getClassLoader()) {
        throw new SecurityException ("invalid address type");
    }
    ObjectInputStream.GetField gf = s.readFields();
    byte[] ipaddress = (byte[])gf.get("ipaddress", null);
    int scope_id = (int)gf.get("scope_id", -1);
    boolean scope_id_set = (boolean)gf.get("scope_id_set", false);
    boolean scope_ifname_set = (boolean)gf.get("scope_ifname_set", false);
    String ifname = (String)gf.get("ifname", null);
    // Fix: validate before use. A truncated or hostile stream may omit the
    // address field entirely; previously that surfaced as a
    // NullPointerException (in deriveNumericScope or ipaddress.clone())
    // rather than a proper deserialization failure.
    if (ipaddress == null) {
        throw new InvalidObjectException("invalid address");
    }
    if (ifname != null && !"".equals (ifname)) {
        try {
            scope_ifname = NetworkInterface.getByName(ifname);
            if (scope_ifname == null) {
                /* the interface does not exist on this system, so we clear
                 * the scope information completely */
                scope_id_set = false;
                scope_ifname_set = false;
                scope_id = 0;
            } else {
                scope_ifname_set = true;
                try {
                    scope_id = deriveNumericScope (ipaddress, scope_ifname);
                } catch (UnknownHostException e) {
                    // typically should not happen, but it may be that
                    // the machine being used for deserialization has
                    // the same interface name but without IPv6 configured.
                }
            }
        } catch (SocketException e) {}
    }
    /* if ifname was not supplied, then the numeric info is used */
    ipaddress = ipaddress.clone();
    // Check that our invariants are satisfied
    if (ipaddress.length != INADDRSZ) {
        throw new InvalidObjectException("invalid address length: "+
                                         ipaddress.length);
    }
    if (holder().getFamily() != AF_INET6) {
        throw new InvalidObjectException("invalid address family type");
    }
    Inet6AddressHolder h = new Inet6AddressHolder(
        ipaddress, scope_id, scope_id_set, scope_ifname, scope_ifname_set
    );
    // Publish the reconstructed holder into the final field via Unsafe.
    UNSAFE.putObject(this, FIELDS_OFFSET, h);
}
/**
 * Default behavior is overridden in order to write the
 * scope_ifname field as a String, rather than a NetworkInterface
 * which is not serializable.
 *
 * Synchronized because it snapshots several holder6 fields and mutates
 * one of them (scope_ifname_set) as a side effect.
 */
private synchronized void writeObject(ObjectOutputStream s)
    throws IOException
{
    String ifname = null;
    if (holder6.scope_ifname != null) {
        ifname = holder6.scope_ifname.getName();
        // NOTE(review): serialization mutates live state here —
        // scope_ifname_set is forced to true so the written form is
        // self-consistent with the non-null ifname.
        holder6.scope_ifname_set = true;
    }
    ObjectOutputStream.PutField pfields = s.putFields();
    pfields.put("ipaddress", holder6.ipaddress);
    pfields.put("scope_id", holder6.scope_id);
    pfields.put("scope_id_set", holder6.scope_id_set);
    pfields.put("scope_ifname_set", holder6.scope_ifname_set);
    pfields.put("ifname", ifname);
    s.writeFields();
}
/**
 * Utility routine to check if the InetAddress is an IP multicast
 * address. 11111111 at the start of the address identifies the
 * address as being a multicast address.
 *
 * @return a {@code boolean} indicating if the InetAddress is an IP
 *         multicast address
 *
 * @since JDK1.1
 */
@Override
public boolean isMulticastAddress() {
    return holder6.isMulticastAddress();
}
/**
 * Utility routine to check if the InetAddress is a wildcard address.
 *
 * @return a {@code boolean} indicating if the InetAddress is
 *         a wildcard address.
 *
 * @since 1.4
 */
@Override
public boolean isAnyLocalAddress() {
    return holder6.isAnyLocalAddress();
}
/**
 * Utility routine to check if the InetAddress is a loopback address.
 *
 * @return a {@code boolean} indicating if the InetAddress is a loopback
 *         address; or false otherwise.
 *
 * @since 1.4
 */
@Override
public boolean isLoopbackAddress() {
    return holder6.isLoopbackAddress();
}
/**
 * Utility routine to check if the InetAddress is a link local address.
 *
 * @return a {@code boolean} indicating if the InetAddress is a link local
 *         address; or false if address is not a link local unicast address.
 *
 * @since 1.4
 */
@Override
public boolean isLinkLocalAddress() {
    return holder6.isLinkLocalAddress();
}
// Static variant operating on raw bytes, usable before an instance
// exists: tests for the fe80::/10 link-local prefix.
static boolean isLinkLocalAddress(byte[] ipaddress) {
    return ((ipaddress[0] & 0xff) == 0xfe
            && (ipaddress[1] & 0xc0) == 0x80);
}
/**
 * Utility routine to check if the InetAddress is a site local address.
 *
 * @return a {@code boolean} indicating if the InetAddress is a site local
 *         address; or false if address is not a site local unicast address.
 *
 * @since 1.4
 */
@Override
public boolean isSiteLocalAddress() {
    return holder6.isSiteLocalAddress();
}
// Static variant operating on raw bytes, usable before an instance
// exists: tests for the (deprecated) fec0::/10 site-local prefix.
static boolean isSiteLocalAddress(byte[] ipaddress) {
    return ((ipaddress[0] & 0xff) == 0xfe
            && (ipaddress[1] & 0xc0) == 0xc0);
}
/**
 * Utility routine to check if the multicast address has global scope.
 *
 * @return a {@code boolean} indicating if the address is a multicast
 *         address of global scope, false if it is not of global scope or
 *         it is not a multicast address
 *
 * @since 1.4
 */
@Override
public boolean isMCGlobal() {
    return holder6.isMCGlobal();
}
/**
 * Utility routine to check if the multicast address has node scope.
 *
 * @return a {@code boolean} indicating if the address is a multicast
 *         address of node-local scope, false if it is not of node-local
 *         scope or it is not a multicast address
 *
 * @since 1.4
 */
@Override
public boolean isMCNodeLocal() {
    return holder6.isMCNodeLocal();
}
/**
 * Utility routine to check if the multicast address has link scope.
 *
 * @return a {@code boolean} indicating if the address is a multicast
 *         address of link-local scope, false if it is not of link-local
 *         scope or it is not a multicast address
 *
 * @since 1.4
 */
@Override
public boolean isMCLinkLocal() {
    return holder6.isMCLinkLocal();
}
/**
 * Utility routine to check if the multicast address has site scope.
 *
 * @return a {@code boolean} indicating if the address is a multicast
 *         address of site-local scope, false if it is not of site-local
 *         scope or it is not a multicast address
 *
 * @since 1.4
 */
@Override
public boolean isMCSiteLocal() {
    return holder6.isMCSiteLocal();
}
/**
 * Utility routine to check if the multicast address has organization scope.
 *
 * @return a {@code boolean} indicating if the address is a multicast
 *         address of organization-local scope, false if it is not of
 *         organization-local scope or it is not a multicast address
 *
 * @since 1.4
 */
@Override
public boolean isMCOrgLocal() {
    return holder6.isMCOrgLocal();
}
/**
 * Returns the raw IP address of this {@code InetAddress} object. The result
 * is in network byte order: the highest order byte of the address is in
 * {@code getAddress()[0]}.
 *
 * A defensive copy is returned so callers cannot mutate internal state.
 *
 * @return the raw IP address of this object.
 */
@Override
public byte[] getAddress() {
    return holder6.ipaddress.clone();
}
/**
 * Returns the numeric scopeId, if this instance is associated with
 * an interface. If no scoped_id is set, the returned value is zero.
 *
 * @return the scopeId, or zero if not set.
 *
 * @since 1.5
 */
public int getScopeId() {
    return holder6.scope_id;
}
/**
 * Returns the scoped interface, if this instance was created with
 * a scoped interface.
 *
 * @return the scoped interface, or null if not set.
 * @since 1.5
 */
public NetworkInterface getScopedInterface() {
    return holder6.scope_ifname;
}
/**
 * Returns the IP address string in textual presentation. If the instance
 * was created specifying a scope identifier then the scope id is appended
 * to the IP address preceded by a "%" (per-cent) character. This can be
 * either a numeric value or a string, depending on which was used to create
 * the instance.
 *
 * @return the raw IP address in a string format.
 */
@Override
public String getHostAddress() {
    // Android-changed: getnameinfo returns smarter representations than getHostAddress().
    // return holder6.getHostAddress();
    /* J2ObjC modified.
    return Libcore.os.getnameinfo(this, NI_NUMERICHOST); // Can't throw.
    */
    // Delegates formatting to the platform's getnameinfo (NI_NUMERICHOST),
    // which produces canonical zero-compressed output.
    return NetworkOs.getnameinfo(this, NI_NUMERICHOST); // Can't throw.
}
/**
 * Returns a hashcode for this IP address.
 *
 * Delegates to the holder so that it stays consistent with
 * {@link #equals(Object)}, which also compares holders.
 *
 * @return a hash code value for this IP address.
 */
@Override
public int hashCode() {
    return holder6.hashCode();
}
/**
 * Compares this object against the specified object. The result is {@code
 * true} if and only if the argument is not {@code null} and it represents
 * the same IP address as this object.
 *
 * <p> Two instances of {@code InetAddress} represent the same IP address
 * if the length of the byte arrays returned by {@code getAddress} is the
 * same for both, and each of the array components is the same for the byte
 * arrays.
 *
 * @param obj the object to compare against.
 *
 * @return {@code true} if the objects are the same; {@code false} otherwise.
 *
 * @see java.net.InetAddress#getAddress()
 */
@Override
public boolean equals(Object obj) {
    // instanceof is false for null, so no separate null check is needed.
    if (!(obj instanceof Inet6Address)) {
        return false;
    }
    return holder6.equals(((Inet6Address) obj).holder6);
}
/**
 * Utility routine to check if the InetAddress is an
 * IPv4 compatible IPv6 address.
 *
 * @return a {@code boolean} indicating if the InetAddress is an IPv4
 *         compatible IPv6 address; or false if address is IPv4 address.
 *
 * @since 1.4
 */
public boolean isIPv4CompatibleAddress() {
    return holder6.isIPv4CompatibleAddress();
}
// Utilities
/** Number of bytes in one 16-bit IPv6 group. */
private final static int INT16SZ = 2;
/*
 * Convert IPv6 binary address into presentation (printable) format.
 * Note: groups are not zero-compressed ("::"); leading zeros within a
 * group are dropped by Integer.toHexString.
 *
 * @param src a byte array representing the IPv6 numeric address
 * @return a String representing an IPv6 address in
 *         textual representation format
 * @since 1.4
 */
static String numericToTextFormat(byte[] src) {
    final int groups = INADDRSZ / INT16SZ;
    // 39 = 8 groups of up to 4 hex digits plus 7 separators.
    StringBuilder sb = new StringBuilder(39);
    for (int i = 0; i < groups; i++) {
        // Fold two consecutive bytes into one unsigned 16-bit group.
        int group = ((src[i << 1] << 8) & 0xff00) | (src[(i << 1) + 1] & 0xff);
        sb.append(Integer.toHexString(group));
        if (i < groups - 1) {
            sb.append(':');
        }
    }
    return sb.toString();
}
// BEGIN Android-removed: Android doesn't need to call native init.
/*
* Perform class load-time initializations.
*
private static native void init();
*/
// END Android-removed: Android doesn't need to call native init.
}
|
googleapis/google-cloud-java | 36,039 | java-bigqueryreservation/proto-google-cloud-bigqueryreservation-v1/src/main/java/com/google/cloud/bigquery/reservation/v1/SearchAssignmentsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/reservation/v1/reservation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.reservation.v1;
/**
*
*
* <pre>
* The request for
* [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments].
* Note: "bigquery.reservationAssignments.search" permission is required on the
* related assignee.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest}
*/
public final class SearchAssignmentsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest)
SearchAssignmentsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// NOTE(review): protoc-generated code — prefer regenerating from
// reservation.proto over hand-editing this file.
// Use SearchAssignmentsRequest.newBuilder() to construct.
private SearchAssignmentsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used for the default instance; string fields start
// as empty strings per proto3 semantics.
private SearchAssignmentsRequest() {
  parent_ = "";
  query_ = "";
  pageToken_ = "";
}
// Reflective instantiation hook used by the protobuf runtime.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new SearchAssignmentsRequest();
}
// Descriptor for this message type, resolved from the generated file proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.bigquery.reservation.v1.ReservationProto
      .internal_static_google_cloud_bigquery_reservation_v1_SearchAssignmentsRequest_descriptor;
}
// Maps descriptor fields onto this class's fields for reflective access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.bigquery.reservation.v1.ReservationProto
      .internal_static_google_cloud_bigquery_reservation_v1_SearchAssignmentsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest.class,
          com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
// Holds either a String (after first decode) or the raw ByteString.
private volatile java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * Required. The resource name of the admin project(containing project and
 * location), e.g.:
 * `projects/myproject/locations/US`.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access: decode the UTF-8 bytes once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Required. The resource name of the admin project(containing project and
 * location), e.g.:
 * `projects/myproject/locations/US`.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    // Cache the encoded ByteString in place of the String.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int QUERY_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
// Holds either a String (after first decode) or the raw ByteString.
private volatile java.lang.Object query_ = "";
/**
 *
 *
 * <pre>
 * Please specify resource name as assignee in the query.
 *
 * Examples:
 *
 * * `assignee=projects/myproject`
 * * `assignee=folders/123`
 * * `assignee=organizations/456`
 * </pre>
 *
 * <code>string query = 2;</code>
 *
 * @return The query.
 */
@java.lang.Override
public java.lang.String getQuery() {
  java.lang.Object ref = query_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    query_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Please specify resource name as assignee in the query.
 *
 * Examples:
 *
 * * `assignee=projects/myproject`
 * * `assignee=folders/123`
 * * `assignee=organizations/456`
 * </pre>
 *
 * <code>string query = 2;</code>
 *
 * @return The bytes for query.
 */
@java.lang.Override
public com.google.protobuf.ByteString getQueryBytes() {
  java.lang.Object ref = query_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    query_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
 *
 *
 * <pre>
 * The maximum number of items to return per page.
 * </pre>
 *
 * <code>int32 page_size = 3;</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
// Holds either a String (after first decode) or the raw ByteString.
private volatile java.lang.Object pageToken_ = "";
/**
 *
 *
 * <pre>
 * The next_page_token value returned from a previous List request, if any.
 * </pre>
 *
 * <code>string page_token = 4;</code>
 *
 * @return The pageToken.
 */
@java.lang.Override
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    pageToken_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * The next_page_token value returned from a previous List request, if any.
 * </pre>
 *
 * <code>string page_token = 4;</code>
 *
 * @return The bytes for pageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields in this proto3 message, so always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes only non-default fields, per proto3 wire-format rules.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, query_);
  }
  if (pageSize_ != 0) {
    output.writeInt32(3, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
  }
  getUnknownFields().writeTo(output);
}
// Computes and memoizes the serialized size; mirrors writeTo exactly.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, query_);
  }
  if (pageSize_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-wise equality over all four fields plus unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest other =
      (com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest) obj;
  if (!getParent().equals(other.getParent())) return false;
  if (!getQuery().equals(other.getQuery())) return false;
  if (getPageSize() != other.getPageSize()) return false;
  if (!getPageToken().equals(other.getPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Memoized hash mixing field numbers and values; consistent with equals.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + QUERY_FIELD_NUMBER;
  hash = (53 * hash) + getQuery().hashCode();
  hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
  hash = (53 * hash) + getPageSize();
  hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each
// with and without an extension registry.
public static com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
    com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// The default instance shares a fresh Builder; others seed it with 'this'.
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* The request for
* [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments].
* Note: "bigquery.reservationAssignments.search" permission is required on the
* related assignee.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest)
com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequestOrBuilder {
// Descriptor plumbing for the Builder, mirroring the outer message class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.bigquery.reservation.v1.ReservationProto
      .internal_static_google_cloud_bigquery_reservation_v1_SearchAssignmentsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.bigquery.reservation.v1.ReservationProto
      .internal_static_google_cloud_bigquery_reservation_v1_SearchAssignmentsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest.class,
          com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest.Builder.class);
}
// Construct using
// com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}
// Resets all fields to proto3 defaults and clears the has-bits.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  parent_ = "";
  query_ = "";
  pageSize_ = 0;
  pageToken_ = "";
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.bigquery.reservation.v1.ReservationProto
      .internal_static_google_cloud_bigquery_reservation_v1_SearchAssignmentsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest
    getDefaultInstanceForType() {
  return com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest.getDefaultInstance();
}
// build() enforces initialization; trivially satisfied for proto3.
@java.lang.Override
public com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest build() {
  com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
@java.lang.Override
public com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest buildPartial() {
  com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest result =
      new com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
// Copies only the fields whose has-bit is set into the new message.
private void buildPartial0(
    com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.parent_ = parent_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.query_ = query_;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.pageSize_ = pageSize_;
  }
  if (((from_bitField0_ & 0x00000008) != 0)) {
    result.pageToken_ = pageToken_;
  }
}
// The following overrides simply delegate to the superclass; they exist
// so the generated Builder exposes covariant 'Builder' return types.
@java.lang.Override
public Builder clone() {
  return super.clone();
}
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest) {
return mergeFrom((com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest other) {
if (other
== com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getQuery().isEmpty()) {
query_ = other.query_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// No field of this message requires initialization, so the builder is
// always considered initialized.
return true;
}
// Parses the message from its binary wire format. Each `case` value is a
// proto wire tag (field number << 3 | wire type): 10/18/34 are length-
// delimited strings for fields 1, 2 and 4; 24 is varint for field 3.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks end of input.
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
query_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
// Unknown fields are preserved rather than dropped.
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even on failure, since fields may have changed.
onChanged();
} // finally
return this;
}
// Bit set tracking which fields have been explicitly assigned on this builder.
private int bitField0_;
// Holds either a String or a ByteString; converted lazily (see accessors).
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the admin project(containing project and
* location), e.g.:
* `projects/myproject/locations/US`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
// Field still holds wire-format bytes: decode once and cache the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the admin project(containing project and
* location), e.g.:
* `projects/myproject/locations/US`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
// Field holds a String: encode to UTF-8 once and cache the ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the admin project(containing project and
* location), e.g.:
* `projects/myproject/locations/US`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the admin project(containing project and
* location), e.g.:
* `projects/myproject/locations/US`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
// Reset to the default value and clear the "has been set" bit.
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the admin project(containing project and
* location), e.g.:
* `projects/myproject/locations/US`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject bytes that are not valid UTF-8 before storing them.
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Holds either a String or a ByteString; converted lazily (see accessors).
private java.lang.Object query_ = "";
/**
*
*
* <pre>
* Please specify resource name as assignee in the query.
*
* Examples:
*
* * `assignee=projects/myproject`
* * `assignee=folders/123`
* * `assignee=organizations/456`
* </pre>
*
* <code>string query = 2;</code>
*
* @return The query.
*/
public java.lang.String getQuery() {
java.lang.Object ref = query_;
if (!(ref instanceof java.lang.String)) {
// Field still holds wire-format bytes: decode once and cache the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
query_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Please specify resource name as assignee in the query.
*
* Examples:
*
* * `assignee=projects/myproject`
* * `assignee=folders/123`
* * `assignee=organizations/456`
* </pre>
*
* <code>string query = 2;</code>
*
* @return The bytes for query.
*/
public com.google.protobuf.ByteString getQueryBytes() {
java.lang.Object ref = query_;
if (ref instanceof String) {
// Field holds a String: encode to UTF-8 once and cache the ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
query_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Please specify resource name as assignee in the query.
*
* Examples:
*
* * `assignee=projects/myproject`
* * `assignee=folders/123`
* * `assignee=organizations/456`
* </pre>
*
* <code>string query = 2;</code>
*
* @param value The query to set.
* @return This builder for chaining.
*/
public Builder setQuery(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
query_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Please specify resource name as assignee in the query.
*
* Examples:
*
* * `assignee=projects/myproject`
* * `assignee=folders/123`
* * `assignee=organizations/456`
* </pre>
*
* <code>string query = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearQuery() {
// Reset to the default value and clear the "has been set" bit.
query_ = getDefaultInstance().getQuery();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Please specify resource name as assignee in the query.
*
* Examples:
*
* * `assignee=projects/myproject`
* * `assignee=folders/123`
* * `assignee=organizations/456`
* </pre>
*
* <code>string query = 2;</code>
*
* @param value The bytes for query to set.
* @return This builder for chaining.
*/
public Builder setQueryBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject bytes that are not valid UTF-8 before storing them.
checkByteStringIsUtf8(value);
query_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of items to return per page.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* The maximum number of items to return per page.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of items to return per page.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
// Clear the "has been set" bit and reset to the int default of 0.
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
// Holds either a String or a ByteString; converted lazily (see accessors).
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The next_page_token value returned from a previous List request, if any.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
// Field still holds wire-format bytes: decode once and cache the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The next_page_token value returned from a previous List request, if any.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
// Field holds a String: encode to UTF-8 once and cache the ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The next_page_token value returned from a previous List request, if any.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* The next_page_token value returned from a previous List request, if any.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
// Reset to the default value and clear the "has been set" bit.
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* The next_page_token value returned from a previous List request, if any.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject bytes that are not valid UTF-8 before storing them.
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
// Pass-through overrides for unknown-field handling, again only to narrow
// the return type to this generated Builder.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest)
// Shared default instance, created once at class-load time and returned to
// every caller of getDefaultInstance().
private static final com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest();
}
public static com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser singleton used by the protobuf runtime to deserialize this message.
// It delegates to Builder#mergeFrom and re-attaches the partially built
// message to any parse exception so callers can inspect what was read.
private static final com.google.protobuf.Parser<SearchAssignmentsRequest> PARSER =
new com.google.protobuf.AbstractParser<SearchAssignmentsRequest>() {
@java.lang.Override
public SearchAssignmentsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SearchAssignmentsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SearchAssignmentsRequest> getParserForType() {
return PARSER;
}
// Instance-level accessor for the shared default instance (MessageLite API).
@java.lang.Override
public com.google.cloud.bigquery.reservation.v1.SearchAssignmentsRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,460 | java-maps-routeoptimization/grpc-google-maps-routeoptimization-v1/src/main/java/com/google/maps/routeoptimization/v1/RouteOptimizationGrpc.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.maps.routeoptimization.v1;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* A service for optimizing vehicle tours.
* Validity of certain types of fields:
* * `google.protobuf.Timestamp`
* * Times are in Unix time: seconds since 1970-01-01T00:00:00+00:00.
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.protobuf.Duration`
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.type.LatLng`
* * latitude must be in [-90.0, 90.0].
* * longitude must be in [-180.0, 180.0].
* * at least one of latitude and longitude must be non-zero.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/maps/routeoptimization/v1/route_optimization_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class RouteOptimizationGrpc {
// Static utility holder for stubs and descriptors; never instantiated.
private RouteOptimizationGrpc() {}
// Fully-qualified proto service name, used to build full method names below.
public static final java.lang.String SERVICE_NAME =
"google.maps.routeoptimization.v1.RouteOptimization";
// Static method descriptors that strictly reflect the proto.
// Lazily initialized via double-checked locking: the field is volatile and
// re-checked inside the synchronized block before being built exactly once.
private static volatile io.grpc.MethodDescriptor<
com.google.maps.routeoptimization.v1.OptimizeToursRequest,
com.google.maps.routeoptimization.v1.OptimizeToursResponse>
getOptimizeToursMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "OptimizeTours",
requestType = com.google.maps.routeoptimization.v1.OptimizeToursRequest.class,
responseType = com.google.maps.routeoptimization.v1.OptimizeToursResponse.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.maps.routeoptimization.v1.OptimizeToursRequest,
com.google.maps.routeoptimization.v1.OptimizeToursResponse>
getOptimizeToursMethod() {
io.grpc.MethodDescriptor<
com.google.maps.routeoptimization.v1.OptimizeToursRequest,
com.google.maps.routeoptimization.v1.OptimizeToursResponse>
getOptimizeToursMethod;
if ((getOptimizeToursMethod = RouteOptimizationGrpc.getOptimizeToursMethod) == null) {
synchronized (RouteOptimizationGrpc.class) {
if ((getOptimizeToursMethod = RouteOptimizationGrpc.getOptimizeToursMethod) == null) {
RouteOptimizationGrpc.getOptimizeToursMethod =
getOptimizeToursMethod =
io.grpc.MethodDescriptor
.<com.google.maps.routeoptimization.v1.OptimizeToursRequest,
com.google.maps.routeoptimization.v1.OptimizeToursResponse>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "OptimizeTours"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.maps.routeoptimization.v1.OptimizeToursRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.maps.routeoptimization.v1.OptimizeToursResponse
.getDefaultInstance()))
.setSchemaDescriptor(
new RouteOptimizationMethodDescriptorSupplier("OptimizeTours"))
.build();
}
}
}
return getOptimizeToursMethod;
}
// Descriptor for the BatchOptimizeTours unary RPC, which returns a
// long-running Operation. Same double-checked lazy initialization pattern
// as getOptimizeToursMethod above.
private static volatile io.grpc.MethodDescriptor<
com.google.maps.routeoptimization.v1.BatchOptimizeToursRequest,
com.google.longrunning.Operation>
getBatchOptimizeToursMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "BatchOptimizeTours",
requestType = com.google.maps.routeoptimization.v1.BatchOptimizeToursRequest.class,
responseType = com.google.longrunning.Operation.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.maps.routeoptimization.v1.BatchOptimizeToursRequest,
com.google.longrunning.Operation>
getBatchOptimizeToursMethod() {
io.grpc.MethodDescriptor<
com.google.maps.routeoptimization.v1.BatchOptimizeToursRequest,
com.google.longrunning.Operation>
getBatchOptimizeToursMethod;
if ((getBatchOptimizeToursMethod = RouteOptimizationGrpc.getBatchOptimizeToursMethod) == null) {
synchronized (RouteOptimizationGrpc.class) {
if ((getBatchOptimizeToursMethod = RouteOptimizationGrpc.getBatchOptimizeToursMethod)
== null) {
RouteOptimizationGrpc.getBatchOptimizeToursMethod =
getBatchOptimizeToursMethod =
io.grpc.MethodDescriptor
.<com.google.maps.routeoptimization.v1.BatchOptimizeToursRequest,
com.google.longrunning.Operation>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "BatchOptimizeTours"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.maps.routeoptimization.v1.BatchOptimizeToursRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.longrunning.Operation.getDefaultInstance()))
.setSchemaDescriptor(
new RouteOptimizationMethodDescriptorSupplier("BatchOptimizeTours"))
.build();
}
}
}
return getBatchOptimizeToursMethod;
}
/** Creates a new async stub that supports all call types for the service */
public static RouteOptimizationStub newStub(io.grpc.Channel channel) {
// The factory is a single-method interface, so a lambda stands in for the
// anonymous StubFactory implementation; AbstractStub invokes it to build
// the concrete stub bound to the given channel and call options.
return RouteOptimizationStub.newStub(
(ch, callOptions) -> new RouteOptimizationStub(ch, callOptions), channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service */
public static RouteOptimizationBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
// Factory invoked by AbstractStub to construct the stub with the resolved
// channel and call options.
io.grpc.stub.AbstractStub.StubFactory<RouteOptimizationBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<RouteOptimizationBlockingV2Stub>() {
@java.lang.Override
public RouteOptimizationBlockingV2Stub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new RouteOptimizationBlockingV2Stub(channel, callOptions);
}
};
return RouteOptimizationBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static RouteOptimizationBlockingStub newBlockingStub(io.grpc.Channel channel) {
// Factory invoked by AbstractStub to construct the stub with the resolved
// channel and call options.
io.grpc.stub.AbstractStub.StubFactory<RouteOptimizationBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<RouteOptimizationBlockingStub>() {
@java.lang.Override
public RouteOptimizationBlockingStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new RouteOptimizationBlockingStub(channel, callOptions);
}
};
return RouteOptimizationBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
public static RouteOptimizationFutureStub newFutureStub(io.grpc.Channel channel) {
// Factory invoked by AbstractStub to construct the stub with the resolved
// channel and call options.
io.grpc.stub.AbstractStub.StubFactory<RouteOptimizationFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<RouteOptimizationFutureStub>() {
@java.lang.Override
public RouteOptimizationFutureStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new RouteOptimizationFutureStub(channel, callOptions);
}
};
return RouteOptimizationFutureStub.newStub(factory, channel);
}
/**
*
*
* <pre>
* A service for optimizing vehicle tours.
* Validity of certain types of fields:
* * `google.protobuf.Timestamp`
* * Times are in Unix time: seconds since 1970-01-01T00:00:00+00:00.
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.protobuf.Duration`
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.type.LatLng`
* * latitude must be in [-90.0, 90.0].
* * longitude must be in [-180.0, 180.0].
* * at least one of latitude and longitude must be non-zero.
* </pre>
*/
// Server-side contract. Each default implementation rejects the call as
// unimplemented until a service implementation overrides it.
public interface AsyncService {
/**
*
*
* <pre>
* Sends an `OptimizeToursRequest` containing a `ShipmentModel` and returns an
* `OptimizeToursResponse` containing `ShipmentRoute`s, which are a set of
* routes to be performed by vehicles minimizing the overall cost.
* A `ShipmentModel` model consists mainly of `Shipment`s that need to be
* carried out and `Vehicle`s that can be used to transport the `Shipment`s.
* The `ShipmentRoute`s assign `Shipment`s to `Vehicle`s. More specifically,
* they assign a series of `Visit`s to each vehicle, where a `Visit`
* corresponds to a `VisitRequest`, which is a pickup or delivery for a
* `Shipment`.
* The goal is to provide an assignment of `ShipmentRoute`s to `Vehicle`s that
* minimizes the total cost where cost has many components defined in the
* `ShipmentModel`.
* </pre>
*/
default void optimizeTours(
com.google.maps.routeoptimization.v1.OptimizeToursRequest request,
io.grpc.stub.StreamObserver<com.google.maps.routeoptimization.v1.OptimizeToursResponse>
responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
getOptimizeToursMethod(), responseObserver);
}
/**
*
*
* <pre>
* Optimizes vehicle tours for one or more `OptimizeToursRequest`
* messages as a batch.
* This method is a Long Running Operation (LRO). The inputs for optimization
* (`OptimizeToursRequest` messages) and outputs (`OptimizeToursResponse`
* messages) are read from and written to Cloud Storage in user-specified
* format. Like the `OptimizeTours` method, each `OptimizeToursRequest`
* contains a `ShipmentModel` and returns an `OptimizeToursResponse`
* containing `ShipmentRoute` fields, which are a set of routes to be
* performed by vehicles minimizing the overall cost.
* The user can poll `operations.get` to check the status of the LRO:
* If the LRO `done` field is false, then at least one request is still
* being processed. Other requests may have completed successfully and their
* results are available in Cloud Storage.
* If the LRO's `done` field is true, then all requests have been processed.
* Any successfully processed requests will have their results available in
* Cloud Storage. Any requests that failed will not have their results
* available in Cloud Storage. If the LRO's `error` field is set, then it
* contains the error from one of the failed requests.
* </pre>
*/
default void batchOptimizeTours(
com.google.maps.routeoptimization.v1.BatchOptimizeToursRequest request,
io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
getBatchOptimizeToursMethod(), responseObserver);
}
}
/**
* Base class for the server implementation of the service RouteOptimization.
*
* <pre>
* A service for optimizing vehicle tours.
* Validity of certain types of fields:
* * `google.protobuf.Timestamp`
* * Times are in Unix time: seconds since 1970-01-01T00:00:00+00:00.
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.protobuf.Duration`
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.type.LatLng`
* * latitude must be in [-90.0, 90.0].
* * longitude must be in [-180.0, 180.0].
* * at least one of latitude and longitude must be non-zero.
* </pre>
*/
// Convenience base class: inherits the AsyncService defaults and wires the
// implementation into a ServerServiceDefinition for registration.
public abstract static class RouteOptimizationImplBase
implements io.grpc.BindableService, AsyncService {
@java.lang.Override
public final io.grpc.ServerServiceDefinition bindService() {
return RouteOptimizationGrpc.bindService(this);
}
}
/**
* A stub to allow clients to do asynchronous rpc calls to service RouteOptimization.
*
* <pre>
* A service for optimizing vehicle tours.
* Validity of certain types of fields:
* * `google.protobuf.Timestamp`
* * Times are in Unix time: seconds since 1970-01-01T00:00:00+00:00.
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.protobuf.Duration`
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.type.LatLng`
* * latitude must be in [-90.0, 90.0].
* * longitude must be in [-180.0, 180.0].
* * at least one of latitude and longitude must be non-zero.
* </pre>
*/
// Async client stub: each method issues a non-blocking unary call and
// delivers the result through the supplied StreamObserver.
public static final class RouteOptimizationStub
extends io.grpc.stub.AbstractAsyncStub<RouteOptimizationStub> {
private RouteOptimizationStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected RouteOptimizationStub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new RouteOptimizationStub(channel, callOptions);
}
/**
*
*
* <pre>
* Sends an `OptimizeToursRequest` containing a `ShipmentModel` and returns an
* `OptimizeToursResponse` containing `ShipmentRoute`s, which are a set of
* routes to be performed by vehicles minimizing the overall cost.
* A `ShipmentModel` model consists mainly of `Shipment`s that need to be
* carried out and `Vehicle`s that can be used to transport the `Shipment`s.
* The `ShipmentRoute`s assign `Shipment`s to `Vehicle`s. More specifically,
* they assign a series of `Visit`s to each vehicle, where a `Visit`
* corresponds to a `VisitRequest`, which is a pickup or delivery for a
* `Shipment`.
* The goal is to provide an assignment of `ShipmentRoute`s to `Vehicle`s that
* minimizes the total cost where cost has many components defined in the
* `ShipmentModel`.
* </pre>
*/
public void optimizeTours(
com.google.maps.routeoptimization.v1.OptimizeToursRequest request,
io.grpc.stub.StreamObserver<com.google.maps.routeoptimization.v1.OptimizeToursResponse>
responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getOptimizeToursMethod(), getCallOptions()),
request,
responseObserver);
}
/**
*
*
* <pre>
* Optimizes vehicle tours for one or more `OptimizeToursRequest`
* messages as a batch.
* This method is a Long Running Operation (LRO). The inputs for optimization
* (`OptimizeToursRequest` messages) and outputs (`OptimizeToursResponse`
* messages) are read from and written to Cloud Storage in user-specified
* format. Like the `OptimizeTours` method, each `OptimizeToursRequest`
* contains a `ShipmentModel` and returns an `OptimizeToursResponse`
* containing `ShipmentRoute` fields, which are a set of routes to be
* performed by vehicles minimizing the overall cost.
* The user can poll `operations.get` to check the status of the LRO:
* If the LRO `done` field is false, then at least one request is still
* being processed. Other requests may have completed successfully and their
* results are available in Cloud Storage.
* If the LRO's `done` field is true, then all requests have been processed.
* Any successfully processed requests will have their results available in
* Cloud Storage. Any requests that failed will not have their results
* available in Cloud Storage. If the LRO's `error` field is set, then it
* contains the error from one of the failed requests.
* </pre>
*/
public void batchOptimizeTours(
com.google.maps.routeoptimization.v1.BatchOptimizeToursRequest request,
io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getBatchOptimizeToursMethod(), getCallOptions()),
request,
responseObserver);
}
}
/**
* A stub to allow clients to do synchronous rpc calls to service RouteOptimization.
*
* <pre>
* A service for optimizing vehicle tours.
* Validity of certain types of fields:
* * `google.protobuf.Timestamp`
* * Times are in Unix time: seconds since 1970-01-01T00:00:00+00:00.
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.protobuf.Duration`
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.type.LatLng`
* * latitude must be in [-90.0, 90.0].
* * longitude must be in [-180.0, 180.0].
* * at least one of latitude and longitude must be non-zero.
* </pre>
*/
// Blocking client stub: each method issues a synchronous unary call and
// returns the response directly.
public static final class RouteOptimizationBlockingV2Stub
extends io.grpc.stub.AbstractBlockingStub<RouteOptimizationBlockingV2Stub> {
private RouteOptimizationBlockingV2Stub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected RouteOptimizationBlockingV2Stub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new RouteOptimizationBlockingV2Stub(channel, callOptions);
}
/**
*
*
* <pre>
* Sends an `OptimizeToursRequest` containing a `ShipmentModel` and returns an
* `OptimizeToursResponse` containing `ShipmentRoute`s, which are a set of
* routes to be performed by vehicles minimizing the overall cost.
* A `ShipmentModel` model consists mainly of `Shipment`s that need to be
* carried out and `Vehicle`s that can be used to transport the `Shipment`s.
* The `ShipmentRoute`s assign `Shipment`s to `Vehicle`s. More specifically,
* they assign a series of `Visit`s to each vehicle, where a `Visit`
* corresponds to a `VisitRequest`, which is a pickup or delivery for a
* `Shipment`.
* The goal is to provide an assignment of `ShipmentRoute`s to `Vehicle`s that
* minimizes the total cost where cost has many components defined in the
* `ShipmentModel`.
* </pre>
*/
public com.google.maps.routeoptimization.v1.OptimizeToursResponse optimizeTours(
com.google.maps.routeoptimization.v1.OptimizeToursRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getOptimizeToursMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Optimizes vehicle tours for one or more `OptimizeToursRequest`
* messages as a batch.
* This method is a Long Running Operation (LRO). The inputs for optimization
* (`OptimizeToursRequest` messages) and outputs (`OptimizeToursResponse`
* messages) are read from and written to Cloud Storage in user-specified
* format. Like the `OptimizeTours` method, each `OptimizeToursRequest`
* contains a `ShipmentModel` and returns an `OptimizeToursResponse`
* containing `ShipmentRoute` fields, which are a set of routes to be
* performed by vehicles minimizing the overall cost.
* The user can poll `operations.get` to check the status of the LRO:
* If the LRO `done` field is false, then at least one request is still
* being processed. Other requests may have completed successfully and their
* results are available in Cloud Storage.
* If the LRO's `done` field is true, then all requests have been processed.
* Any successfully processed requests will have their results available in
* Cloud Storage. Any requests that failed will not have their results
* available in Cloud Storage. If the LRO's `error` field is set, then it
* contains the error from one of the failed requests.
* </pre>
*/
public com.google.longrunning.Operation batchOptimizeTours(
com.google.maps.routeoptimization.v1.BatchOptimizeToursRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getBatchOptimizeToursMethod(), getCallOptions(), request);
}
}
/**
* A stub to allow clients to do limited synchronous rpc calls to service RouteOptimization.
*
* <pre>
* A service for optimizing vehicle tours.
* Validity of certain types of fields:
* * `google.protobuf.Timestamp`
* * Times are in Unix time: seconds since 1970-01-01T00:00:00+00:00.
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.protobuf.Duration`
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.type.LatLng`
* * latitude must be in [-90.0, 90.0].
* * longitude must be in [-180.0, 180.0].
* * at least one of latitude and longitude must be non-zero.
* </pre>
*/
public static final class RouteOptimizationBlockingStub
extends io.grpc.stub.AbstractBlockingStub<RouteOptimizationBlockingStub> {
private RouteOptimizationBlockingStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected RouteOptimizationBlockingStub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new RouteOptimizationBlockingStub(channel, callOptions);
}
/**
*
*
* <pre>
* Sends an `OptimizeToursRequest` containing a `ShipmentModel` and returns an
* `OptimizeToursResponse` containing `ShipmentRoute`s, which are a set of
* routes to be performed by vehicles minimizing the overall cost.
* A `ShipmentModel` model consists mainly of `Shipment`s that need to be
* carried out and `Vehicle`s that can be used to transport the `Shipment`s.
* The `ShipmentRoute`s assign `Shipment`s to `Vehicle`s. More specifically,
* they assign a series of `Visit`s to each vehicle, where a `Visit`
* corresponds to a `VisitRequest`, which is a pickup or delivery for a
* `Shipment`.
* The goal is to provide an assignment of `ShipmentRoute`s to `Vehicle`s that
* minimizes the total cost where cost has many components defined in the
* `ShipmentModel`.
* </pre>
*/
public com.google.maps.routeoptimization.v1.OptimizeToursResponse optimizeTours(
com.google.maps.routeoptimization.v1.OptimizeToursRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getOptimizeToursMethod(), getCallOptions(), request);
}
/**
*
*
* <pre>
* Optimizes vehicle tours for one or more `OptimizeToursRequest`
* messages as a batch.
* This method is a Long Running Operation (LRO). The inputs for optimization
* (`OptimizeToursRequest` messages) and outputs (`OptimizeToursResponse`
* messages) are read from and written to Cloud Storage in user-specified
* format. Like the `OptimizeTours` method, each `OptimizeToursRequest`
* contains a `ShipmentModel` and returns an `OptimizeToursResponse`
* containing `ShipmentRoute` fields, which are a set of routes to be
* performed by vehicles minimizing the overall cost.
* The user can poll `operations.get` to check the status of the LRO:
* If the LRO `done` field is false, then at least one request is still
* being processed. Other requests may have completed successfully and their
* results are available in Cloud Storage.
* If the LRO's `done` field is true, then all requests have been processed.
* Any successfully processed requests will have their results available in
* Cloud Storage. Any requests that failed will not have their results
* available in Cloud Storage. If the LRO's `error` field is set, then it
* contains the error from one of the failed requests.
* </pre>
*/
public com.google.longrunning.Operation batchOptimizeTours(
com.google.maps.routeoptimization.v1.BatchOptimizeToursRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getBatchOptimizeToursMethod(), getCallOptions(), request);
}
}
/**
* A stub to allow clients to do ListenableFuture-style rpc calls to service RouteOptimization.
*
* <pre>
* A service for optimizing vehicle tours.
* Validity of certain types of fields:
* * `google.protobuf.Timestamp`
* * Times are in Unix time: seconds since 1970-01-01T00:00:00+00:00.
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.protobuf.Duration`
* * seconds must be in [0, 253402300799],
* i.e. in [1970-01-01T00:00:00+00:00, 9999-12-31T23:59:59+00:00].
* * nanos must be unset or set to 0.
* * `google.type.LatLng`
* * latitude must be in [-90.0, 90.0].
* * longitude must be in [-180.0, 180.0].
* * at least one of latitude and longitude must be non-zero.
* </pre>
*/
public static final class RouteOptimizationFutureStub
extends io.grpc.stub.AbstractFutureStub<RouteOptimizationFutureStub> {
private RouteOptimizationFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected RouteOptimizationFutureStub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new RouteOptimizationFutureStub(channel, callOptions);
}
/**
*
*
* <pre>
* Sends an `OptimizeToursRequest` containing a `ShipmentModel` and returns an
* `OptimizeToursResponse` containing `ShipmentRoute`s, which are a set of
* routes to be performed by vehicles minimizing the overall cost.
* A `ShipmentModel` model consists mainly of `Shipment`s that need to be
* carried out and `Vehicle`s that can be used to transport the `Shipment`s.
* The `ShipmentRoute`s assign `Shipment`s to `Vehicle`s. More specifically,
* they assign a series of `Visit`s to each vehicle, where a `Visit`
* corresponds to a `VisitRequest`, which is a pickup or delivery for a
* `Shipment`.
* The goal is to provide an assignment of `ShipmentRoute`s to `Vehicle`s that
* minimizes the total cost where cost has many components defined in the
* `ShipmentModel`.
* </pre>
*/
public com.google.common.util.concurrent.ListenableFuture<
com.google.maps.routeoptimization.v1.OptimizeToursResponse>
optimizeTours(com.google.maps.routeoptimization.v1.OptimizeToursRequest request) {
return io.grpc.stub.ClientCalls.futureUnaryCall(
getChannel().newCall(getOptimizeToursMethod(), getCallOptions()), request);
}
/**
*
*
* <pre>
* Optimizes vehicle tours for one or more `OptimizeToursRequest`
* messages as a batch.
* This method is a Long Running Operation (LRO). The inputs for optimization
* (`OptimizeToursRequest` messages) and outputs (`OptimizeToursResponse`
* messages) are read from and written to Cloud Storage in user-specified
* format. Like the `OptimizeTours` method, each `OptimizeToursRequest`
* contains a `ShipmentModel` and returns an `OptimizeToursResponse`
* containing `ShipmentRoute` fields, which are a set of routes to be
* performed by vehicles minimizing the overall cost.
* The user can poll `operations.get` to check the status of the LRO:
* If the LRO `done` field is false, then at least one request is still
* being processed. Other requests may have completed successfully and their
* results are available in Cloud Storage.
* If the LRO's `done` field is true, then all requests have been processed.
* Any successfully processed requests will have their results available in
* Cloud Storage. Any requests that failed will not have their results
* available in Cloud Storage. If the LRO's `error` field is set, then it
* contains the error from one of the failed requests.
* </pre>
*/
public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
batchOptimizeTours(com.google.maps.routeoptimization.v1.BatchOptimizeToursRequest request) {
return io.grpc.stub.ClientCalls.futureUnaryCall(
getChannel().newCall(getBatchOptimizeToursMethod(), getCallOptions()), request);
}
}
  // Dispatch identifiers used by MethodHandlers#invoke to route a server-side
  // call to the matching AsyncService method.
  private static final int METHODID_OPTIMIZE_TOURS = 0;
  private static final int METHODID_BATCH_OPTIMIZE_TOURS = 1;
  /**
   * Bridges the generic gRPC server-call machinery to the typed {@code AsyncService}
   * implementation: one instance per RPC method, dispatching on {@code methodId}.
   */
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    // Unary / server-streaming entry point: casts are safe because methodId
    // fixes the concrete request/response types (hence the @SuppressWarnings).
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_OPTIMIZE_TOURS:
          serviceImpl.optimizeTours(
              (com.google.maps.routeoptimization.v1.OptimizeToursRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.maps.routeoptimization.v1.OptimizeToursResponse>)
                  responseObserver);
          break;
        case METHODID_BATCH_OPTIMIZE_TOURS:
          serviceImpl.batchOptimizeTours(
              (com.google.maps.routeoptimization.v1.BatchOptimizeToursRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    // Client-streaming / bidi entry point: this service declares no such
    // methods, so any methodId reaching here is a programming error.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
.addMethod(
getOptimizeToursMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.maps.routeoptimization.v1.OptimizeToursRequest,
com.google.maps.routeoptimization.v1.OptimizeToursResponse>(
service, METHODID_OPTIMIZE_TOURS)))
.addMethod(
getBatchOptimizeToursMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.maps.routeoptimization.v1.BatchOptimizeToursRequest,
com.google.longrunning.Operation>(service, METHODID_BATCH_OPTIMIZE_TOURS)))
.build();
}
  /**
   * Base supplier that resolves the proto file descriptor and, from it, the
   * "RouteOptimization" service descriptor (used for reflection/schema data).
   */
  private abstract static class RouteOptimizationBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    RouteOptimizationBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.maps.routeoptimization.v1.RouteOptimizationServiceProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("RouteOptimization");
    }
  }
  /** File-level descriptor supplier; inherits everything from the base supplier. */
  private static final class RouteOptimizationFileDescriptorSupplier
      extends RouteOptimizationBaseDescriptorSupplier {
    RouteOptimizationFileDescriptorSupplier() {}
  }
  /** Supplies the proto descriptor of a single RPC method, looked up by name. */
  private static final class RouteOptimizationMethodDescriptorSupplier
      extends RouteOptimizationBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    // Proto name of the method this supplier describes.
    private final java.lang.String methodName;

    RouteOptimizationMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  // Lazily-built descriptor; volatile so the double-checked locking below is safe.
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  /**
   * Returns the {@link io.grpc.ServiceDescriptor} for this service, building it
   * on first use via double-checked locking on the generated class.
   */
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (RouteOptimizationGrpc.class) {
        // Re-check under the lock: another thread may have built it meanwhile.
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new RouteOptimizationFileDescriptorSupplier())
                      .addMethod(getOptimizeToursMethod())
                      .addMethod(getBatchOptimizeToursMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
|
apache/james-project | 36,507 | backends-common/rabbitmq/src/test/java/org/apache/james/backends/rabbitmq/RabbitMQTest.java | /****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.backends.rabbitmq;
import static com.rabbitmq.client.MessageProperties.PERSISTENT_TEXT_PLAIN;
import static org.apache.james.backends.rabbitmq.Constants.AUTO_ACK;
import static org.apache.james.backends.rabbitmq.Constants.AUTO_DELETE;
import static org.apache.james.backends.rabbitmq.Constants.DIRECT_EXCHANGE;
import static org.apache.james.backends.rabbitmq.Constants.DURABLE;
import static org.apache.james.backends.rabbitmq.Constants.EMPTY_ROUTING_KEY;
import static org.apache.james.backends.rabbitmq.Constants.EXCLUSIVE;
import static org.apache.james.backends.rabbitmq.Constants.MULTIPLE;
import static org.apache.james.backends.rabbitmq.Constants.NO_LOCAL;
import static org.apache.james.backends.rabbitmq.Constants.NO_PROPERTIES;
import static org.apache.james.backends.rabbitmq.Constants.REQUEUE;
import static org.apache.james.backends.rabbitmq.RabbitMQFixture.EXCHANGE_NAME;
import static org.apache.james.backends.rabbitmq.RabbitMQFixture.ROUTING_KEY;
import static org.apache.james.backends.rabbitmq.RabbitMQFixture.WORK_QUEUE;
import static org.apache.james.backends.rabbitmq.RabbitMQFixture.WORK_QUEUE_2;
import static org.apache.james.backends.rabbitmq.RabbitMQFixture.awaitAtMostOneMinute;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import java.util.Queue;
import java.util.UUID;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import com.github.fge.lambdas.Throwing;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.CancelCallback;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import com.rabbitmq.client.DeliverCallback;
import com.rabbitmq.client.Delivery;
import nl.jqno.equalsverifier.EqualsVerifier;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
import reactor.rabbitmq.BindingSpecification;
import reactor.rabbitmq.ConsumeOptions;
import reactor.rabbitmq.ExchangeSpecification;
import reactor.rabbitmq.OutboundMessage;
import reactor.rabbitmq.QueueSpecification;
import reactor.rabbitmq.Receiver;
import reactor.rabbitmq.Sender;
class RabbitMQTest {
    // Empty arguments map for queue declarations that need no extra options.
    public static final ImmutableMap<String, Object> NO_QUEUE_DECLARE_ARGUMENTS = ImmutableMap.of();
    // Shared cached thread pool for tests needing background work.
    public static final ExecutorService EXECUTOR = Executors.newCachedThreadPool();

    // Single RabbitMQ container shared by all tests; STRONG isolation resets
    // broker state between tests.
    @RegisterExtension
    static RabbitMQExtension rabbitMQExtension = RabbitMQExtension.singletonRabbitMQ()
        .isolationPolicy(RabbitMQExtension.IsolationPolicy.STRONG);
    /** Basic publish/consume scenarios exercised through a single connection. */
    @Nested
    class SingleConsumerTest {

        private ConnectionFactory connectionFactory;
        private Connection connection;
        private Channel channel;

        @BeforeEach
        void setup(DockerRabbitMQ rabbitMQ) throws IOException, TimeoutException {
            connectionFactory = rabbitMQ.connectionFactory();
            // Short recovery interval so reconnection after a broker restart is quick.
            connectionFactory.setNetworkRecoveryInterval(1000);
            connection = connectionFactory.newConnection();
            channel = connection.createChannel();
        }

        @AfterEach
        void tearDown(DockerRabbitMQ rabbitMQ) throws Exception {
            closeQuietly(connection, channel);
            rabbitMQ.reset();
        }

        @Test
        void publishedEventWithoutSubscriberShouldNotBeLost() throws Exception {
            String queueName = createQueue(channel);

            publishAMessage(channel);

            // The message must still be retrievable even though nobody consumed it yet.
            awaitAtMostOneMinute.until(() -> messageReceived(channel, queueName));
        }

        @Test
        void getQueueLengthShouldReturnEmptyWhenEmptyQueue() throws Exception {
            String queueName = createQueue(channel);

            awaitAtMostOneMinute.until(() -> rabbitMQExtension.managementAPI()
                .queueDetails("/", queueName)
                .getQueueLength() == 0);
        }

        @Test
        void getQueueLengthShouldReturnExactlyNumberOfMessagesInQueue() throws Exception {
            String queueName = createQueue(channel);

            publishAMessage(channel);
            publishAMessage(channel);

            awaitAtMostOneMinute.until(() -> rabbitMQExtension.managementAPI()
                .queueDetails("/", queueName)
                .getQueueLength() == 2);
        }

        @Test
        void demonstrateDurability(DockerRabbitMQ rabbitMQ) throws Exception {
            String queueName = createQueue(channel);
            publishAMessage(channel);

            //wait for message to be effectively published
            Thread.sleep(200);

            // A persistent message on a durable queue must survive a broker restart.
            rabbitMQ.restart();

            awaitAtMostOneMinute.until(() -> containerIsRestarted(rabbitMQ));
            // Give the client time to auto-recover the connection.
            Thread.sleep(connectionFactory.getNetworkRecoveryInterval());
            assertThat(channel.basicGet(queueName, !AUTO_ACK)).isNotNull();
        }

        // The container is considered up again once a new connection succeeds.
        private Boolean containerIsRestarted(DockerRabbitMQ rabbitMQ) {
            try {
                rabbitMQ.connectionFactory().newConnection();
                return true;
            } catch (Exception e) {
                return false;
            }
        }

        // Declares a durable exchange plus a randomly-named durable queue bound to it.
        private String createQueue(Channel channel) throws IOException {
            channel.exchangeDeclare(EXCHANGE_NAME, DIRECT_EXCHANGE, DURABLE);
            String queueName = UUID.randomUUID().toString();
            channel.queueDeclare(queueName, DURABLE, !EXCLUSIVE, AUTO_DELETE, NO_QUEUE_DECLARE_ARGUMENTS).getQueue();
            channel.queueBind(queueName, EXCHANGE_NAME, ROUTING_KEY);
            return queueName;
        }

        // Publishes one persistent text message on the test exchange/routing key.
        private void publishAMessage(Channel channel) throws IOException {
            AMQP.BasicProperties basicProperties = new AMQP.BasicProperties.Builder()
                .deliveryMode(PERSISTENT_TEXT_PLAIN.getDeliveryMode())
                .priority(PERSISTENT_TEXT_PLAIN.getPriority())
                .contentType(PERSISTENT_TEXT_PLAIN.getContentType())
                .build();
            channel.basicPublish(EXCHANGE_NAME, ROUTING_KEY, basicProperties, asBytes("Hello, world!"));
        }

        // Non-destructive-ish probe: true once basicGet returns a delivery.
        private Boolean messageReceived(Channel channel, String queueName) {
            try {
                return channel.basicGet(queueName, !AUTO_ACK) != null;
            } catch (Exception e) {
                return false;
            }
        }
    }
@Nested
class FourConnections {
        // One dedicated factory/connection/channel per simulated client.
        // In the scenarios below channel1 acts as the producer and
        // channels 2-4 as consumers.
        private ConnectionFactory connectionFactory1;
        private ConnectionFactory connectionFactory2;
        private ConnectionFactory connectionFactory3;
        private ConnectionFactory connectionFactory4;
        private Connection connection1;
        private Connection connection2;
        private Connection connection3;
        private Connection connection4;
        private Channel channel1;
        private Channel channel2;
        private Channel channel3;
        private Channel channel4;
        // Opens four independent connections (and one channel on each) against
        // the same broker before every test.
        @BeforeEach
        void setup(DockerRabbitMQ rabbitMQ) throws IOException, TimeoutException {
            connectionFactory1 = rabbitMQ.connectionFactory();
            connectionFactory2 = rabbitMQ.connectionFactory();
            connectionFactory3 = rabbitMQ.connectionFactory();
            connectionFactory4 = rabbitMQ.connectionFactory();
            connection1 = connectionFactory1.newConnection();
            connection2 = connectionFactory2.newConnection();
            connection3 = connectionFactory3.newConnection();
            connection4 = connectionFactory4.newConnection();
            channel1 = connection1.createChannel();
            channel2 = connection2.createChannel();
            channel3 = connection3.createChannel();
            channel4 = connection4.createChannel();
        }
        // Closes channels first, then their connections; failures are ignored.
        @AfterEach
        void tearDown() {
            closeQuietly(
                channel1, channel2, channel3, channel4,
                connection1, connection2, connection3, connection4);
        }
        @Nested
        class BroadCast {

            // In the following case, each consumer will receive the messages produced by the
            // producer.
            // To do so, each consumer binds its own queue to the producer exchange, so
            // every published message is copied into every queue.
            @Test
            void rabbitMQShouldSupportTheBroadcastCase() throws Exception {
                // Declare a single exchange and three queues attached to it.
                channel1.exchangeDeclare(EXCHANGE_NAME, DIRECT_EXCHANGE, DURABLE);
                String queue2 = channel2.queueDeclare().getQueue();
                channel2.queueBind(queue2, EXCHANGE_NAME, ROUTING_KEY);
                String queue3 = channel3.queueDeclare().getQueue();
                channel3.queueBind(queue3, EXCHANGE_NAME, ROUTING_KEY);
                String queue4 = channel4.queueDeclare().getQueue();
                channel4.queueBind(queue4, EXCHANGE_NAME, ROUTING_KEY);

                InMemoryConsumer consumer2 = new InMemoryConsumer(channel2);
                InMemoryConsumer consumer3 = new InMemoryConsumer(channel3);
                InMemoryConsumer consumer4 = new InMemoryConsumer(channel4);
                channel2.basicConsume(queue2, consumer2);
                channel3.basicConsume(queue3, consumer3);
                channel4.basicConsume(queue4, consumer4);

                // the publisher will produce 10 messages
                IntStream.range(0, 10)
                    .mapToObj(String::valueOf)
                    .map(RabbitMQTest.this::asBytes)
                    .forEach(Throwing.<byte[]>consumer(
                        bytes -> channel1.basicPublish(EXCHANGE_NAME, ROUTING_KEY, NO_PROPERTIES, bytes)).sneakyThrow());

                // 10 messages copied to each of the 3 consumers = 30 deliveries in total.
                awaitAtMostOneMinute.until(
                    () -> countReceivedMessages(consumer2, consumer3, consumer4) == 30);

                Integer[] expectedResult = IntStream.range(0, 10).boxed().toArray(Integer[]::new);
                // Check every subscriber have received all the messages.
                assertThat(consumer2.getConsumedMessages()).containsOnly(expectedResult);
                assertThat(consumer3.getConsumedMessages()).containsOnly(expectedResult);
                assertThat(consumer4.getConsumedMessages()).containsOnly(expectedResult);
            }
        }
@Nested
class WorkQueue {
// In the following case, consumers will receive the messages produced by the
// producer but will share them.
// To do so, we will bind a single queue to the producer exchange.
@Test
void rabbitMQShouldSupportTheWorkQueueCase() throws Exception {
int nbMessages = 100;
// Declare the exchange and a single queue attached to it.
channel1.exchangeDeclare(EXCHANGE_NAME, "direct", DURABLE);
channel1.queueDeclare(WORK_QUEUE, DURABLE, !EXCLUSIVE, AUTO_DELETE, ImmutableMap.of());
channel1.queueBind(WORK_QUEUE, EXCHANGE_NAME, ROUTING_KEY);
// Publisher will produce 100 messages
IntStream.range(0, nbMessages)
.mapToObj(String::valueOf)
.map(RabbitMQTest.this::asBytes)
.forEach(Throwing.<byte[]>consumer(
bytes -> channel1.basicPublish(EXCHANGE_NAME, ROUTING_KEY, NO_PROPERTIES, bytes)).sneakyThrow());
InMemoryConsumer consumer2 = new InMemoryConsumer(channel2);
InMemoryConsumer consumer3 = new InMemoryConsumer(channel3);
InMemoryConsumer consumer4 = new InMemoryConsumer(channel4);
channel2.basicConsume(WORK_QUEUE, consumer2);
channel3.basicConsume(WORK_QUEUE, consumer3);
channel4.basicConsume(WORK_QUEUE, consumer4);
awaitAtMostOneMinute.until(
() -> countReceivedMessages(consumer2, consumer3, consumer4) == nbMessages);
Integer[] expectedResult = IntStream.range(0, nbMessages).boxed().toArray(Integer[]::new);
assertThat(
Iterables.concat(
consumer2.getConsumedMessages(),
consumer3.getConsumedMessages(),
consumer4.getConsumedMessages()))
.containsOnly(expectedResult);
}
@Test
void rabbitMQShouldRejectSecondConsumerInExclusiveWorkQueueCase() throws Exception {
channel1.exchangeDeclare(EXCHANGE_NAME, "direct", DURABLE);
channel1.queueDeclare(WORK_QUEUE, DURABLE, !EXCLUSIVE, !AUTO_DELETE, ImmutableMap.of());
channel1.queueBind(WORK_QUEUE, EXCHANGE_NAME, ROUTING_KEY);
IntStream.range(0, 10)
.mapToObj(String::valueOf)
.map(RabbitMQTest.this::asBytes)
.forEach(Throwing.<byte[]>consumer(
bytes -> channel1.basicPublish(EXCHANGE_NAME, ROUTING_KEY, NO_PROPERTIES, bytes)).sneakyThrow());
ConcurrentLinkedQueue<Integer> receivedMessages = new ConcurrentLinkedQueue<>();
String dyingConsumerTag = "dyingConsumer";
ImmutableMap<String, Object> arguments = ImmutableMap.of();
channel2.basicConsume(WORK_QUEUE, AUTO_ACK, dyingConsumerTag, !NO_LOCAL, EXCLUSIVE, arguments,
(consumerTag, message) -> {
try {
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException e) {
//do nothing
}
},
(consumerTag -> { }));
assertThatThrownBy(() ->
channel3.basicConsume(WORK_QUEUE, AUTO_ACK, "fallbackConsumer", !NO_LOCAL, EXCLUSIVE, arguments,
(consumerTag, message) -> { },
consumerTag -> { }))
.isInstanceOf(IOException.class)
.hasStackTraceContaining("ACCESS_REFUSED");
}
@Test
void rabbitMQShouldSupportTheExclusiveWorkQueueCase() throws Exception {
channel1.exchangeDeclare(EXCHANGE_NAME, "direct", DURABLE);
channel1.queueDeclare(WORK_QUEUE, DURABLE, !EXCLUSIVE, !AUTO_DELETE, ImmutableMap.of());
channel1.queueBind(WORK_QUEUE, EXCHANGE_NAME, ROUTING_KEY);
IntStream.range(0, 10)
.mapToObj(String::valueOf)
.map(RabbitMQTest.this::asBytes)
.forEach(Throwing.<byte[]>consumer(
bytes -> channel1.basicPublish(EXCHANGE_NAME, ROUTING_KEY, NO_PROPERTIES, bytes)).sneakyThrow());
String dyingConsumerTag = "dyingConsumer";
ImmutableMap<String, Object> arguments = ImmutableMap.of();
ConcurrentLinkedQueue<Integer> receivedMessages = new ConcurrentLinkedQueue<>();
CancelCallback doNothingOnCancel = consumerTag -> { };
DeliverCallback ackFirstMessageOnly = (consumerTag, message) -> {
if (receivedMessages.size() == 0) {
receivedMessages.add(Integer.valueOf(new String(message.getBody(), StandardCharsets.UTF_8)));
channel2.basicAck(message.getEnvelope().getDeliveryTag(), !MULTIPLE);
} else {
channel2.basicNack(message.getEnvelope().getDeliveryTag(), !MULTIPLE, REQUEUE);
}
};
channel2.basicConsume(WORK_QUEUE, !AUTO_ACK, dyingConsumerTag, !NO_LOCAL, EXCLUSIVE, arguments, ackFirstMessageOnly, doNothingOnCancel);
awaitAtMostOneMinute.until(() -> receivedMessages.size() == 1);
channel2.basicCancel(dyingConsumerTag);
InMemoryConsumer fallbackConsumer = new InMemoryConsumer(channel3);
channel3.basicConsume(WORK_QUEUE, AUTO_ACK, "fallbackConsumer", !NO_LOCAL, EXCLUSIVE, arguments, fallbackConsumer);
awaitAtMostOneMinute.until(() -> countReceivedMessages(fallbackConsumer) >= 1);
assertThat(receivedMessages).containsExactly(0);
assertThat(fallbackConsumer.getConsumedMessages()).contains(1, 2).doesNotContain(0);
}
@Test
void rabbitMQShouldDeliverMessageToSingleActiveConsumer() throws Exception {
channel1.exchangeDeclare(EXCHANGE_NAME, "direct", DURABLE);
channel1.queueDeclare(WORK_QUEUE, DURABLE, !EXCLUSIVE, !AUTO_DELETE, QueueArguments.builder()
.put("x-single-active-consumer", true)
.build());
channel1.queueBind(WORK_QUEUE, EXCHANGE_NAME, ROUTING_KEY);
IntStream.range(0, 10)
.mapToObj(String::valueOf)
.map(RabbitMQTest.this::asBytes)
.forEach(Throwing.<byte[]>consumer(
bytes -> channel1.basicPublish(EXCHANGE_NAME, ROUTING_KEY, NO_PROPERTIES, bytes)).sneakyThrow());
channel2.basicQos(1);
channel3.basicQos(1);
AtomicInteger firstRegisteredConsumerMessageCount = new AtomicInteger(0);
AtomicInteger secondRegisteredConsumerMessageCount = new AtomicInteger(0);
String firstRegisteredConsumer = "firstRegisteredConsumer";
ImmutableMap<String, Object> arguments = ImmutableMap.of();
channel2.basicConsume(WORK_QUEUE, !AUTO_ACK, firstRegisteredConsumer, !NO_LOCAL, !EXCLUSIVE, arguments,
(consumerTag, message) -> incrementCountForConsumerAndAckMessage(firstRegisteredConsumerMessageCount, message, channel2),
(consumerTag -> {
}));
channel3.basicConsume(WORK_QUEUE, !AUTO_ACK, "starvingConsumer", !NO_LOCAL, !EXCLUSIVE, arguments,
(consumerTag, message) -> incrementCountForConsumerAndAckMessage(secondRegisteredConsumerMessageCount, message, channel3),
consumerTag -> { });
awaitAtMostOneMinute.until(() -> (firstRegisteredConsumerMessageCount.get() + secondRegisteredConsumerMessageCount.get()) == 10);
assertThat(firstRegisteredConsumerMessageCount.get()).isEqualTo(10);
assertThat(secondRegisteredConsumerMessageCount.get()).isEqualTo(0);
}
private void incrementCountForConsumerAndAckMessage(AtomicInteger firstRegisteredConsumerMessageCount, Delivery message, Channel channel2) throws IOException {
try {
firstRegisteredConsumerMessageCount.incrementAndGet();
TimeUnit.SECONDS.sleep(1);
channel2.basicAck(message.getEnvelope().getDeliveryTag(), false);
} catch (InterruptedException e) {
//do nothing
}
}
@Test
void rabbitMQShouldProvideSingleActiveConsumerName() throws Exception {
channel1.exchangeDeclare(EXCHANGE_NAME, "direct", DURABLE);
channel1.queueDeclare(WORK_QUEUE, DURABLE, !EXCLUSIVE, !AUTO_DELETE, Constants.WITH_SINGLE_ACTIVE_CONSUMER);
channel1.queueBind(WORK_QUEUE, EXCHANGE_NAME, ROUTING_KEY);
channel1.basicPublish(EXCHANGE_NAME, ROUTING_KEY, NO_PROPERTIES, "foo".getBytes(StandardCharsets.UTF_8));
AtomicInteger deliveredMessagesCount = new AtomicInteger(0);
String firstRegisteredConsumer = "firstRegisteredConsumer";
ImmutableMap<String, Object> arguments = ImmutableMap.of();
channel2.basicConsume(WORK_QUEUE, AUTO_ACK, firstRegisteredConsumer, !NO_LOCAL, !EXCLUSIVE, arguments,
(consumerTag, message) -> deliveredMessagesCount.incrementAndGet(),
(consumerTag -> { }));
channel3.basicConsume(WORK_QUEUE, AUTO_ACK, "starvingConsumer", !NO_LOCAL, !EXCLUSIVE, arguments,
(consumerTag, message) -> deliveredMessagesCount.incrementAndGet(),
consumerTag -> { });
awaitAtMostOneMinute.until(() -> deliveredMessagesCount.get() > 0);
awaitAtMostOneMinute.until(() -> rabbitMQExtension.managementAPI()
.queueDetails("/", WORK_QUEUE)
.consumerDetails.isEmpty() == false);
List<String> currentConsumerName = rabbitMQExtension.managementAPI()
.queueDetails("/", WORK_QUEUE)
.consumerDetails
.stream()
.filter(consumer -> consumer.status == RabbitMQManagementAPI.ActivityStatus.SingleActive)
.map(RabbitMQManagementAPI.ConsumerDetails::getTag)
.collect(Collectors.toList());
assertThat(currentConsumerName)
.hasSize(1)
.first()
.isEqualTo(firstRegisteredConsumer);
}
@Test
void bindingSourceShouldMatchBeanContract() {
EqualsVerifier.forClass(RabbitMQManagementAPI.BindingSource.class)
.verify();
}
@Test
void listBindingsShouldReturnEmptyWhenNone() throws Exception {
assertThat(rabbitMQExtension.managementAPI()
.listBindings("/", EXCHANGE_NAME)
.stream()
.map(RabbitMQManagementAPI.BindingSource::getDestination))
.isEmpty();
}
@Test
void listBindingsShouldAllowRetrievingDestination() throws Exception {
channel1.exchangeDeclare(EXCHANGE_NAME, "direct", DURABLE);
channel1.queueDeclare(WORK_QUEUE, DURABLE, !EXCLUSIVE, !AUTO_DELETE, Constants.WITH_SINGLE_ACTIVE_CONSUMER);
channel1.queueBind(WORK_QUEUE, EXCHANGE_NAME, ROUTING_KEY);
assertThat(rabbitMQExtension.managementAPI()
.listBindings("/", EXCHANGE_NAME)
.stream()
.map(RabbitMQManagementAPI.BindingSource::getDestination))
.containsExactly(WORK_QUEUE);
}
@Test
void listBindingsShouldAllowRetrievingDestinations() throws Exception {
channel1.exchangeDeclare(EXCHANGE_NAME, "direct", DURABLE);
channel1.queueDeclare(WORK_QUEUE, DURABLE, !EXCLUSIVE, !AUTO_DELETE, Constants.WITH_SINGLE_ACTIVE_CONSUMER);
channel1.queueDeclare(WORK_QUEUE_2, DURABLE, !EXCLUSIVE, !AUTO_DELETE, Constants.WITH_SINGLE_ACTIVE_CONSUMER);
channel1.queueBind(WORK_QUEUE, EXCHANGE_NAME, ROUTING_KEY);
channel1.queueBind(WORK_QUEUE_2, EXCHANGE_NAME, ROUTING_KEY);
assertThat(rabbitMQExtension.managementAPI()
.listBindings("/", EXCHANGE_NAME)
.stream()
.map(RabbitMQManagementAPI.BindingSource::getDestination))
.containsExactly(WORK_QUEUE, WORK_QUEUE_2);
}
@Test
void rabbitMQShouldDeliverMessageToFallbackSingleActiveConsumer() throws Exception {
    // Single-active-consumer queue: only one consumer receives deliveries at a
    // time; when it is cancelled RabbitMQ promotes the next registered one.
    channel1.exchangeDeclare(EXCHANGE_NAME, "direct", DURABLE);
    channel1.queueDeclare(WORK_QUEUE, DURABLE, !EXCLUSIVE, !AUTO_DELETE, Constants.WITH_SINGLE_ACTIVE_CONSUMER);
    channel1.queueBind(WORK_QUEUE, EXCHANGE_NAME, ROUTING_KEY);

    // Publish ten messages up front so both consumers have work available.
    IntStream.range(0, 10)
        .mapToObj(String::valueOf)
        .map(RabbitMQTest.this::asBytes)
        .forEach(Throwing.<byte[]>consumer(
            bytes -> channel1.basicPublish(EXCHANGE_NAME, ROUTING_KEY, NO_PROPERTIES, bytes)).sneakyThrow());

    AtomicInteger firstRegisteredConsumerMessageCount = new AtomicInteger(0);
    AtomicInteger secondRegisteredConsumerMessageCount = new AtomicInteger(0);
    String firstRegisteredConsumer = "firstRegisteredConsumer";
    ImmutableMap<String, Object> arguments = ImmutableMap.of();

    // Active consumer: acks the first five messages, then nacks + requeues the
    // rest so they remain available for the fallback consumer. The sleep slows
    // consumption down enough for the cancellation below to happen mid-stream.
    channel2.basicConsume(WORK_QUEUE, !AUTO_ACK, firstRegisteredConsumer, !NO_LOCAL, !EXCLUSIVE, arguments,
        (consumerTag, message) -> {
            try {
                if (firstRegisteredConsumerMessageCount.get() < 5) {
                    channel2.basicAck(message.getEnvelope().getDeliveryTag(), !MULTIPLE);
                    firstRegisteredConsumerMessageCount.incrementAndGet();
                } else {
                    channel2.basicNack(message.getEnvelope().getDeliveryTag(), !MULTIPLE, REQUEUE);
                }
                TimeUnit.SECONDS.sleep(1);
            } catch (InterruptedException e) {
                // Restore the interrupt flag instead of silently swallowing it,
                // so the client's delivery thread can observe the interruption.
                Thread.currentThread().interrupt();
            }
        },
        (consumerTag -> { }));

    // Fallback consumer: idle until the active one is cancelled, then auto-acks
    // the remaining deliveries.
    channel3.basicConsume(WORK_QUEUE, AUTO_ACK, "fallbackConsumer", !NO_LOCAL, !EXCLUSIVE, arguments,
        (consumerTag, message) -> {
            secondRegisteredConsumerMessageCount.incrementAndGet();
        },
        consumerTag -> { });

    // Wait for the active consumer to process its five messages, cancel it, and
    // verify the fallback picks up the remaining five.
    awaitAtMostOneMinute.until(() -> firstRegisteredConsumerMessageCount.get() == 5);
    channel2.basicCancel(firstRegisteredConsumer);
    awaitAtMostOneMinute.until(() -> (firstRegisteredConsumerMessageCount.get() + secondRegisteredConsumerMessageCount.get()) == 10);

    assertThat(firstRegisteredConsumerMessageCount.get()).isEqualTo(5);
    assertThat(secondRegisteredConsumerMessageCount.get()).isEqualTo(5);
}
}
@Nested
class Routing {
@Test
void rabbitMQShouldSupportRouting() throws Exception {
String conversation1 = "c1";
String conversation2 = "c2";
String conversation3 = "c3";
String conversation4 = "c4";
// Declare the exchange and a single queue attached to it.
channel1.exchangeDeclare(EXCHANGE_NAME, DIRECT_EXCHANGE, DURABLE);
String queue1 = channel1.queueDeclare().getQueue();
// 1 will follow conversation 1 and 2
channel1.queueBind(queue1, EXCHANGE_NAME, conversation1);
channel1.queueBind(queue1, EXCHANGE_NAME, conversation2);
String queue2 = channel2.queueDeclare().getQueue();
// 2 will follow conversation 2 and 3
channel2.queueBind(queue2, EXCHANGE_NAME, conversation2);
channel2.queueBind(queue2, EXCHANGE_NAME, conversation3);
String queue3 = channel3.queueDeclare().getQueue();
// 3 will follow conversation 3 and 4
channel3.queueBind(queue3, EXCHANGE_NAME, conversation3);
channel3.queueBind(queue3, EXCHANGE_NAME, conversation4);
String queue4 = channel4.queueDeclare().getQueue();
// 4 will follow conversation 1 and 4
channel4.queueBind(queue4, EXCHANGE_NAME, conversation1);
channel4.queueBind(queue4, EXCHANGE_NAME, conversation4);
channel1.basicPublish(EXCHANGE_NAME, conversation1, NO_PROPERTIES, asBytes("1"));
channel2.basicPublish(EXCHANGE_NAME, conversation2, NO_PROPERTIES, asBytes("2"));
channel3.basicPublish(EXCHANGE_NAME, conversation3, NO_PROPERTIES, asBytes("3"));
channel4.basicPublish(EXCHANGE_NAME, conversation4, NO_PROPERTIES, asBytes("4"));
InMemoryConsumer consumer1 = new InMemoryConsumer(channel1);
InMemoryConsumer consumer2 = new InMemoryConsumer(channel2);
InMemoryConsumer consumer3 = new InMemoryConsumer(channel3);
InMemoryConsumer consumer4 = new InMemoryConsumer(channel4);
channel1.basicConsume(queue1, consumer1);
channel2.basicConsume(queue2, consumer2);
channel3.basicConsume(queue3, consumer3);
channel4.basicConsume(queue4, consumer4);
awaitAtMostOneMinute.until(() -> countReceivedMessages(consumer1, consumer2, consumer3, consumer4) == 8);
assertThat(consumer1.getConsumedMessages()).containsOnly(1, 2);
assertThat(consumer2.getConsumedMessages()).containsOnly(2, 3);
assertThat(consumer3.getConsumedMessages()).containsOnly(3, 4);
assertThat(consumer4.getConsumedMessages()).containsOnly(1, 4);
}
}
private long countReceivedMessages(InMemoryConsumer... consumers) {
    // Sum the sizes of every consumer's consumed-message queue.
    long total = 0;
    for (InMemoryConsumer consumer : consumers) {
        total += consumer.getConsumedMessages().size();
    }
    return total;
}
}
@Nested
class ConcurrencyTest {
    // Dedicated queue/exchange names so these tests do not collide with the
    // fixtures used by the sibling nested test classes.
    private static final String QUEUE_NAME_1 = "TEST1";
    private static final String EXCHANGE_NAME_1 = "EXCHANGE1";

    @BeforeEach
    void setup() {
        // Declare a durable direct exchange bound to a durable, non-exclusive,
        // non-auto-delete queue, then pre-load five uniquely-identifiable
        // messages for the consuming tests below.
        Sender sender = rabbitMQExtension.getSender();
        Flux.concat(
            sender.declareExchange(ExchangeSpecification.exchange(EXCHANGE_NAME_1)
                .durable(true)
                .type("direct")),
            sender.declareQueue(QueueSpecification.queue(QUEUE_NAME_1)
                .durable(DURABLE)
                .exclusive(!EXCLUSIVE)
                .autoDelete(!AUTO_DELETE)),
            sender.bind(BindingSpecification.binding()
                .exchange(EXCHANGE_NAME_1)
                .queue(QUEUE_NAME_1)
                .routingKey(EMPTY_ROUTING_KEY)))
            .then()
            .block();
        IntStream.rangeClosed(1, 5)
            .forEach(i -> sender.send(Mono.just(new OutboundMessage(EXCHANGE_NAME_1, "", String.format("Message + %s", UUID.randomUUID()).getBytes(StandardCharsets.UTF_8))))
                .block());
    }

    @Test
    void consumingShouldSuccessWhenAckConcurrent() throws Exception {
        // Acks happen on a separate executor thread; the latch confirms that
        // all five pre-loaded messages get acknowledged despite the hand-off.
        ReceiverProvider receiverProvider = rabbitMQExtension.getReceiverProvider();
        CountDownLatch countDownLatch = new CountDownLatch(5);
        Flux.using(receiverProvider::createReceiver,
            receiver -> receiver.consumeManualAck(QUEUE_NAME_1, new ConsumeOptions()),
            Receiver::close)
            .filter(getResponse -> getResponse.getBody() != null)
            .concatMap(acknowledgableDelivery -> Mono.fromCallable(() -> {
                acknowledgableDelivery.ack(true)
;
                countDownLatch.countDown();
                return acknowledgableDelivery;
            }).subscribeOn(Schedulers.fromExecutor(EXECUTOR)))
            .subscribe();
        assertThat(countDownLatch.await(10, TimeUnit.SECONDS)).isTrue();
    }

    @Test
    @Disabled("Now, it fail, Because using Flux.take and concatMap")
    // See https://github.com/reactor/reactor-rabbitmq/issues/176
    void consumingShouldSuccessWhenAckConcurrentWithFluxTake() throws Exception {
        // Same as above but bounded with take(counter); kept disabled to
        // document the upstream reactor-rabbitmq issue linked above.
        ReceiverProvider receiverProvider = rabbitMQExtension.getReceiverProvider();
        int counter = 5;
        CountDownLatch countDownLatch = new CountDownLatch(counter);
        Flux.using(receiverProvider::createReceiver,
            receiver -> receiver.consumeManualAck(QUEUE_NAME_1, new ConsumeOptions()),
            Receiver::close)
            .filter(getResponse -> getResponse.getBody() != null)
            .take(counter)
            .concatMap(acknowledgableDelivery -> Mono.fromCallable(() -> {
                acknowledgableDelivery.ack(true);
                countDownLatch.countDown();
                System.out.println(Thread.currentThread().getName() + ": " + countDownLatch.getCount());
                return acknowledgableDelivery;
            }).subscribeOn(Schedulers.fromExecutor(EXECUTOR)))
            .subscribe();
        assertThat(countDownLatch.await(10, TimeUnit.SECONDS)).isTrue();
    }

    @Test
    @Disabled("Now, sometimes pass, sometimes fail. Because using Flux.take and flatMap, It can be re-produce it by try 'Repeat until failure' of Intellij")
    // See https://github.com/reactor/reactor-rabbitmq/issues/176
    void consumingShouldSuccessWhenAckConcurrentWithFluxTakeAndFlatMap() throws Exception {
        // Variant using flatMap (unordered, concurrent) instead of concatMap;
        // flaky for the same upstream reason, hence disabled.
        ReceiverProvider receiverProvider = rabbitMQExtension.getReceiverProvider();
        int counter = 5;
        CountDownLatch countDownLatch = new CountDownLatch(counter);
        Flux.using(receiverProvider::createReceiver,
            receiver -> receiver.consumeManualAck(QUEUE_NAME_1, new ConsumeOptions()),
            Receiver::close)
            .filter(getResponse -> getResponse.getBody() != null)
            .take(counter)
            .flatMap(acknowledgableDelivery -> Mono.fromCallable(() -> {
                acknowledgableDelivery.ack(true);
                countDownLatch.countDown();
                System.out.println(Thread.currentThread().getName() + ": " + countDownLatch.getCount());
                return acknowledgableDelivery;
            }).subscribeOn(Schedulers.fromExecutor(EXECUTOR)))
            .subscribe();
        assertThat(countDownLatch.await(10, TimeUnit.SECONDS)).isTrue();
    }
}
private void closeQuietly(AutoCloseable... closeables) {
    // Delegate each resource to the single-argument overload.
    for (AutoCloseable closeable : closeables) {
        closeQuietly(closeable);
    }
}
private void closeQuietly(AutoCloseable closeable) {
    try {
        closeable.close();
    } catch (Exception ignored) {
        // Best-effort cleanup for test teardown: a failure to close a
        // channel/connection is irrelevant to the test outcome.
    }
}
// Encodes the message as UTF-8 bytes, independent of the platform default charset.
private byte[] asBytes(String message) {
    return message.getBytes(StandardCharsets.UTF_8);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.